astroid-2.0.1/0000755000076500000240000000000013324065077013737 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/0000755000076500000240000000000013324065077015404 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/__init__.py0000644000076500000240000001261113324063433017510 0ustar claudiustaff00000000000000# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014 Google, Inc. # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016, 2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2016 Moises Lopez # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Python Abstract Syntax Tree New Generation The aim of this module is to provide a common base representation of python source code for projects such as pychecker, pyreverse, pylint... Well, actually the development of this library is essentially governed by pylint's needs. It extends class defined in the python's _ast module with some additional methods and attributes. Instance attributes are added by a builder object, which can either generate extended ast (let's call them astroid ;) by visiting an existent ast tree or by inspecting living object. Methods are added by monkey patching ast classes. Main modules are: * nodes and scoped_nodes for more information about methods and attributes added to different node classes * the manager contains a high level object to get astroid trees from source files and living objects. It maintains a cache of previously constructed tree for quick access * builder contains the class responsible to build astroid trees """ import enum import itertools import os import sys import wrapt _Context = enum.Enum('Context', 'Load Store Del') Load = _Context.Load Store = _Context.Store Del = _Context.Del del _Context from .__pkginfo__ import version as __version__ # WARNING: internal imports order matters ! # pylint: disable=redefined-builtin, wildcard-import # make all exception classes accessible from astroid package from astroid.exceptions import * # make all node classes accessible from astroid package from astroid.nodes import * # trigger extra monkey-patching from astroid import inference # more stuff available from astroid import raw_building from astroid.bases import BaseInstance, Instance, BoundMethod, UnboundMethod from astroid.node_classes import are_exclusive, unpack_infer from astroid.scoped_nodes import builtin_lookup from astroid.builder import parse, extract_node from astroid.util import Uninferable, YES # make a manager instance (borg) accessible from astroid package from astroid.manager import AstroidManager MANAGER = AstroidManager() del AstroidManager # transform utilities (filters and decorator) # pylint: disable=dangerous-default-value @wrapt.decorator def _inference_tip_cached(func, instance, args, kwargs, _cache={}): """Cache decorator used for inference tips""" node = args[0] try: return iter(_cache[func, node]) except KeyError: result = func(*args, **kwargs) # Need to keep an iterator around original, copy = itertools.tee(result) _cache[func, node] = list(copy) return original # pylint: enable=dangerous-default-value def inference_tip(infer_function, raise_on_overwrite=False): """Given an instance specific inference function, return a function to be given to MANAGER.register_transform to set this inference function. 
:param bool raise_on_overwrite: Raise an `InferenceOverwriteError` if the inference tip will overwrite another. Used for debugging Typical usage .. sourcecode:: python MANAGER.register_transform(Call, inference_tip(infer_named_tuple), predicate) .. Note:: Using an inference tip will override any previously set inference tip for the given node. Use a predicate in the transform to prevent excess overwrites. """ def transform(node, infer_function=infer_function): if (raise_on_overwrite and node._explicit_inference is not None and node._explicit_inference is not infer_function): raise InferenceOverwriteError( "Inference already set to {existing_inference}. " "Trying to overwrite with {new_inference} for {node}" .format(existing_inference=infer_function, new_inference=node._explicit_inference, node=node)) # pylint: disable=no-value-for-parameter node._explicit_inference = _inference_tip_cached(infer_function) return node return transform def register_module_extender(manager, module_name, get_extension_mod): def transform(node): extension_module = get_extension_mod() for name, objs in extension_module.locals.items(): node.locals[name] = objs for obj in objs: if obj.parent is extension_module: obj.parent = node manager.register_transform(Module, transform, lambda n: n.name == module_name) # load brain plugins BRAIN_MODULES_DIR = os.path.join(os.path.dirname(__file__), 'brain') if BRAIN_MODULES_DIR not in sys.path: # add it to the end of the list so user path take precedence sys.path.append(BRAIN_MODULES_DIR) # load modules in this directory for module in os.listdir(BRAIN_MODULES_DIR): if module.endswith('.py'): __import__(module[:-3]) astroid-2.0.1/astroid/__pkginfo__.py0000644000076500000240000000431213324063467020210 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. # Copyright (c) 2015-2017 Ceridwen # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2015 Radosław Ganczarek # Copyright (c) 2016 Moises Lopez # Copyright (c) 2017 Hugo # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017 Calen Pennington # Copyright (c) 2018 Ashley Whetter # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """astroid packaging information""" distname = 'astroid' modname = 'astroid' version = '2.0.1' numversion = tuple(int(elem) for elem in version.split('.') if elem.isdigit()) extras_require = {} install_requires = [ 'lazy_object_proxy', 'six', 'wrapt', 'typing;python_version<"3.5"', 'typed_ast;python_version<"3.7" and implementation_name== "cpython"' ] # pylint: disable=redefined-builtin; why license is a builtin anyway? license = 'LGPL' author = 'Python Code Quality Authority' author_email = 'code-quality@python.org' mailinglist = "mailto://%s" % author_email web = 'https://github.com/PyCQA/astroid' description = "A abstract syntax tree for Python with inference support." 
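# Note (illustrative): the environment markers in install_requires above are
# PEP 508 markers, e.g. 'typing;python_version<"3.5"' pulls in the typing
# backport only on Python < 3.5, and typed_ast is only required on CPython
# interpreters older than 3.7.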
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Quality Assurance", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ] astroid-2.0.1/astroid/_ast.py0000644000076500000240000000225013324063433016675 0ustar claudiustaff00000000000000import ast import sys from collections import namedtuple from typing import Optional _ast_py2 = _ast_py3 = None try: import typed_ast.ast3 as _ast_py3 import typed_ast.ast27 as _ast_py2 except ImportError: pass FunctionType = namedtuple('FunctionType', ['argtypes', 'returns']) def _get_parser_module(parse_python_two: bool = False): if parse_python_two: parser_module = _ast_py2 elif sys.version_info[:2] >= (3, 7): # The typed_ast module doesn't support the full 3.7 syntax yet. # Remove once typed_ast is updated. parser_module = ast else: parser_module = _ast_py3 return parser_module or ast def _parse(string: str, parse_python_two: bool = False): return _get_parser_module(parse_python_two=parse_python_two).parse(string) def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]: """Given a correct type comment, obtain a FunctionType object""" if _ast_py3 is None: return None func_type = _ast_py3.parse(type_comment, "", "func_type") return FunctionType( argtypes=func_type.argtypes, returns=func_type.returns, ) astroid-2.0.1/astroid/arguments.py0000644000076500000240000002673013324063433017765 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016, 2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 Anthony Sottile # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER from astroid import bases from astroid import context as contextmod from astroid import exceptions from astroid import nodes from astroid import util class CallSite: """Class for understanding arguments passed into a call site It needs a call context, which contains the arguments and the keyword arguments that were passed into a given call site. In order to infer what an argument represents, call :meth:`infer_argument` with the corresponding function node and the argument name. """ def __init__(self, callcontext, argument_context_map=None): if argument_context_map is None: argument_context_map = {} self.argument_context_map = argument_context_map args = callcontext.args keywords = callcontext.keywords self.duplicated_keywords = set() self._unpacked_args = self._unpack_args(args) self._unpacked_kwargs = self._unpack_keywords(keywords) self.positional_arguments = [ arg for arg in self._unpacked_args if arg is not util.Uninferable ] self.keyword_arguments = { key: value for key, value in self._unpacked_kwargs.items() if value is not util.Uninferable } @classmethod def from_call(cls, call_node): """Get a CallSite object from the given Call node.""" callcontext = contextmod.CallContext(call_node.args, call_node.keywords) return cls(callcontext) def has_invalid_arguments(self): """Check if in the current CallSite were passed *invalid* arguments This can mean multiple things. 
For instance, if an unpacking of an invalid object was passed, then this method will return True. Other cases can be when the arguments can't be inferred by astroid, for example, by passing objects which aren't known statically. """ return len(self.positional_arguments) != len(self._unpacked_args) def has_invalid_keywords(self): """Check if in the current CallSite were passed *invalid* keyword arguments For instance, unpacking a dictionary with integer keys is invalid (**{1:2}), because the keys must be strings, which will make this method to return True. Other cases where this might return True if objects which can't be inferred were passed. """ return len(self.keyword_arguments) != len(self._unpacked_kwargs) def _unpack_keywords(self, keywords): values = {} context = contextmod.InferenceContext() context.extra_context = self.argument_context_map for name, value in keywords: if name is None: # Then it's an unpacking operation (**) try: inferred = next(value.infer(context=context)) except exceptions.InferenceError: values[name] = util.Uninferable continue if not isinstance(inferred, nodes.Dict): # Not something we can work with. values[name] = util.Uninferable continue for dict_key, dict_value in inferred.items: try: dict_key = next(dict_key.infer(context=context)) except exceptions.InferenceError: values[name] = util.Uninferable continue if not isinstance(dict_key, nodes.Const): values[name] = util.Uninferable continue if not isinstance(dict_key.value, str): values[name] = util.Uninferable continue if dict_key.value in values: # The name is already in the dictionary values[dict_key.value] = util.Uninferable self.duplicated_keywords.add(dict_key.value) continue values[dict_key.value] = dict_value else: values[name] = value return values def _unpack_args(self, args): values = [] context = contextmod.InferenceContext() context.extra_context = self.argument_context_map for arg in args: if isinstance(arg, nodes.Starred): try: inferred = next(arg.value.infer(context=context)) except exceptions.InferenceError: values.append(util.Uninferable) continue if inferred is util.Uninferable: values.append(util.Uninferable) continue if not hasattr(inferred, 'elts'): values.append(util.Uninferable) continue values.extend(inferred.elts) else: values.append(arg) return values def infer_argument(self, funcnode, name, context): """infer a function argument value according to the call context Arguments: funcnode: The function being called. name: The name of the argument whose value is being inferred. context: Inference context object """ if name in self.duplicated_keywords: raise exceptions.InferenceError('The arguments passed to {func!r} ' ' have duplicate keywords.', call_site=self, func=funcnode, arg=name, context=context) # Look into the keywords first, maybe it's already there. try: return self.keyword_arguments[name].infer(context) except KeyError: pass # Too many arguments given and no variable arguments. 
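# For example, for ``def f(a, b): ...`` a call site ``f(1, 2, 3)`` supplies
# three positionals for two declared arguments; without a *args parameter
# there is nothing the extra value could bind to, so inference gives up below.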
if len(self.positional_arguments) > len(funcnode.args.args): if not funcnode.args.vararg: raise exceptions.InferenceError('Too many positional arguments ' 'passed to {func!r} that does ' 'not have *args.', call_site=self, func=funcnode, arg=name, context=context) positional = self.positional_arguments[:len(funcnode.args.args)] vararg = self.positional_arguments[len(funcnode.args.args):] argindex = funcnode.args.find_argname(name)[0] kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs} kwargs = { key: value for key, value in self.keyword_arguments.items() if key not in kwonlyargs } # If there are too few positionals compared to # what the function expects to receive, check to see # if the missing positional arguments were passed # as keyword arguments and if so, place them into the # positional args list. if len(positional) < len(funcnode.args.args): for func_arg in funcnode.args.args: if func_arg.name in kwargs: arg = kwargs.pop(func_arg.name) positional.append(arg) if argindex is not None: # 2. first argument of instance/class method if argindex == 0 and funcnode.type in ('method', 'classmethod'): if context.boundnode is not None: boundnode = context.boundnode else: # XXX can do better ? boundnode = funcnode.parent.frame() if isinstance(boundnode, nodes.ClassDef): # Verify that we're accessing a method # of the metaclass through a class, as in # `cls.metaclass_method`. In this case, the # first argument is always the class. method_scope = funcnode.parent.scope() if method_scope is boundnode.metaclass(): return iter((boundnode, )) if funcnode.type == 'method': if not isinstance(boundnode, bases.Instance): boundnode = bases.Instance(boundnode) return iter((boundnode,)) if funcnode.type == 'classmethod': return iter((boundnode,)) # if we have a method, extract one position # from the index, so we'll take in account # the extra parameter represented by `self` or `cls` if funcnode.type in ('method', 'classmethod'): argindex -= 1 # 2. search arg index try: return self.positional_arguments[argindex].infer(context) except IndexError: pass if funcnode.args.kwarg == name: # It wants all the keywords that were passed into # the call site. if self.has_invalid_keywords(): raise exceptions.InferenceError( "Inference failed to find values for all keyword arguments " "to {func!r}: {unpacked_kwargs!r} doesn't correspond to " "{keyword_arguments!r}.", keyword_arguments=self.keyword_arguments, unpacked_kwargs=self._unpacked_kwargs, call_site=self, func=funcnode, arg=name, context=context) kwarg = nodes.Dict(lineno=funcnode.args.lineno, col_offset=funcnode.args.col_offset, parent=funcnode.args) kwarg.postinit([(nodes.const_factory(key), value) for key, value in kwargs.items()]) return iter((kwarg, )) if funcnode.args.vararg == name: # It wants all the args that were passed into # the call site. if self.has_invalid_arguments(): raise exceptions.InferenceError( "Inference failed to find values for all positional " "arguments to {func!r}: {unpacked_args!r} doesn't " "correspond to {positional_arguments!r}.", positional_arguments=self.positional_arguments, unpacked_args=self._unpacked_args, call_site=self, func=funcnode, arg=name, context=context) args = nodes.Tuple(lineno=funcnode.args.lineno, col_offset=funcnode.args.col_offset, parent=funcnode.args) args.postinit(vararg) return iter((args, )) # Check if it's a default parameter. 
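# For example, with ``def f(a, b=42): ...`` and the call ``f(1)``, asking for
# 'b' reaches this point and infers the declared default (Const(42));
# default_value() raises NoDefault when the argument has no default at all.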
try: return funcnode.args.default_value(name).infer(context) except exceptions.NoDefault: pass raise exceptions.InferenceError('No value found for argument {name} to ' '{func!r}', call_site=self, func=funcnode, arg=name, context=context) astroid-2.0.1/astroid/as_string.py0000644000076500000240000005317013324063433017747 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2010 Daniel Harding # Copyright (c) 2013-2016, 2018 Claudiu Popa # Copyright (c) 2013-2014 Google, Inc. # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Jared Garst # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017 rr- # Copyright (c) 2018 brendanator # Copyright (c) 2018 Nick Drozd # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """This module renders Astroid nodes as string: * :func:`to_code` function return equivalent (hopefully valid) python string * :func:`dump` function return an internal representation of nodes found in the tree, useful for debugging or understanding the tree structure """ import sys # pylint: disable=unused-argument DOC_NEWLINE = '\0' class AsStringVisitor: """Visitor to render an Astroid node as a valid python code string""" def __init__(self, indent): self.indent = indent def __call__(self, node): """Makes this visitor behave as a simple function""" return node.accept(self).replace(DOC_NEWLINE, '\n') def _docs_dedent(self, doc): """Stop newlines in docs being indented by self._stmt_list""" return '\n%s"""%s"""' % (self.indent, doc.replace('\n', DOC_NEWLINE)) def _stmt_list(self, stmts, indent=True): """return a list of nodes to string""" stmts = '\n'.join( [nstr for nstr in [n.accept(self) for n in stmts] if nstr]) if indent: return self.indent + stmts.replace('\n', '\n' + self.indent) return stmts def _precedence_parens(self, node, child, is_left=True): """Wrap child in parens only if required to keep same semantics""" if self._should_wrap(node, child, is_left): return '(%s)' % child.accept(self) return child.accept(self) def _should_wrap(self, node, child, is_left): """Wrap child if: - it has lower precedence - same precedence with position opposite to associativity direction """ node_precedence = node.op_precedence() child_precedence = child.op_precedence() if node_precedence > child_precedence: # 3 * (4 + 5) return True if (node_precedence == child_precedence and is_left != node.op_left_associative()): # 3 - (4 - 5) # (2**3)**4 return True return False ## visit_ methods ########################################### def visit_arguments(self, node): """return an astroid.Function node as string""" return node.format_args() def visit_assignattr(self, node): """return an astroid.AssAttr node as string""" return self.visit_attribute(node) def visit_assert(self, node): """return an astroid.Assert node as string""" if node.fail: return 'assert %s, %s' % (node.test.accept(self), node.fail.accept(self)) return 'assert %s' % node.test.accept(self) def visit_assignname(self, node): """return an astroid.AssName node as string""" return node.name def visit_assign(self, node): """return an astroid.Assign node as string""" lhs = ' = '.join([n.accept(self) for n in node.targets]) return '%s = %s' % (lhs, node.value.accept(self)) def visit_augassign(self, node): """return an astroid.AugAssign node as string""" return '%s %s %s' % (node.target.accept(self), node.op, 
node.value.accept(self)) def visit_annassign(self, node): """Return an astroid.AugAssign node as string""" target = node.target.accept(self) annotation = node.annotation.accept(self) if node.value is None: return '%s: %s' % (target, annotation) return '%s: %s = %s' % (target, annotation, node.value.accept(self)) def visit_repr(self, node): """return an astroid.Repr node as string""" return '`%s`' % node.value.accept(self) def visit_binop(self, node): """return an astroid.BinOp node as string""" left = self._precedence_parens(node, node.left) right = self._precedence_parens(node, node.right, is_left=False) if node.op == '**': return '%s%s%s' % (left, node.op, right) return '%s %s %s' % (left, node.op, right) def visit_boolop(self, node): """return an astroid.BoolOp node as string""" values = ['%s' % self._precedence_parens(node, n) for n in node.values] return (' %s ' % node.op).join(values) def visit_break(self, node): """return an astroid.Break node as string""" return 'break' def visit_call(self, node): """return an astroid.Call node as string""" expr_str = self._precedence_parens(node, node.func) args = [arg.accept(self) for arg in node.args] if node.keywords: keywords = [kwarg.accept(self) for kwarg in node.keywords] else: keywords = [] args.extend(keywords) return '%s(%s)' % (expr_str, ', '.join(args)) def visit_classdef(self, node): """return an astroid.ClassDef node as string""" decorate = node.decorators.accept(self) if node.decorators else '' bases = ', '.join([n.accept(self) for n in node.bases]) if sys.version_info[0] == 2: bases = '(%s)' % bases if bases else '' else: metaclass = node.metaclass() if metaclass and not node.has_metaclass_hack(): if bases: bases = '(%s, metaclass=%s)' % (bases, metaclass.name) else: bases = '(metaclass=%s)' % metaclass.name else: bases = '(%s)' % bases if bases else '' docs = self._docs_dedent(node.doc) if node.doc else '' return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs, self._stmt_list(node.body)) def visit_compare(self, node): """return an astroid.Compare node as string""" rhs_str = ' '.join([ '%s %s' % (op, self._precedence_parens(node, expr, is_left=False)) for op, expr in node.ops ]) return '%s %s' % (self._precedence_parens(node, node.left), rhs_str) def visit_comprehension(self, node): """return an astroid.Comprehension node as string""" ifs = ''.join([' if %s' % n.accept(self) for n in node.ifs]) return 'for %s in %s%s' % (node.target.accept(self), node.iter.accept(self), ifs) def visit_const(self, node): """return an astroid.Const node as string""" return repr(node.value) def visit_continue(self, node): """return an astroid.Continue node as string""" return 'continue' def visit_delete(self, node): # XXX check if correct """return an astroid.Delete node as string""" return 'del %s' % ', '.join([child.accept(self) for child in node.targets]) def visit_delattr(self, node): """return an astroid.DelAttr node as string""" return self.visit_attribute(node) def visit_delname(self, node): """return an astroid.DelName node as string""" return node.name def visit_decorators(self, node): """return an astroid.Decorators node as string""" return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes]) def visit_dict(self, node): """return an astroid.Dict node as string""" return '{%s}' % ', '.join(self._visit_dict(node)) def _visit_dict(self, node): for key, value in node.items: key = key.accept(self) value = value.accept(self) if key == '**': # It can only be a DictUnpack node. 
yield key + value else: yield '%s: %s' % (key, value) def visit_dictunpack(self, node): return '**' def visit_dictcomp(self, node): """return an astroid.DictComp node as string""" return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self), ' '.join([n.accept(self) for n in node.generators])) def visit_expr(self, node): """return an astroid.Discard node as string""" return node.value.accept(self) def visit_emptynode(self, node): """dummy method for visiting an Empty node""" return '' def visit_excepthandler(self, node): if node.type: if node.name: excs = 'except %s, %s' % (node.type.accept(self), node.name.accept(self)) else: excs = 'except %s' % node.type.accept(self) else: excs = 'except' return '%s:\n%s' % (excs, self._stmt_list(node.body)) def visit_ellipsis(self, node): """return an astroid.Ellipsis node as string""" return '...' def visit_empty(self, node): """return an Empty node as string""" return '' def visit_exec(self, node): """return an astroid.Exec node as string""" if node.locals: return 'exec %s in %s, %s' % (node.expr.accept(self), node.locals.accept(self), node.globals.accept(self)) if node.globals: return 'exec %s in %s' % (node.expr.accept(self), node.globals.accept(self)) return 'exec %s' % node.expr.accept(self) def visit_extslice(self, node): """return an astroid.ExtSlice node as string""" return ','.join([dim.accept(self) for dim in node.dims]) def visit_for(self, node): """return an astroid.For node as string""" fors = 'for %s in %s:\n%s' % (node.target.accept(self), node.iter.accept(self), self._stmt_list(node.body)) if node.orelse: fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse)) return fors def visit_importfrom(self, node): """return an astroid.ImportFrom node as string""" return 'from %s import %s' % ('.' 
* (node.level or 0) + node.modname, _import_string(node.names)) def visit_functiondef(self, node): """return an astroid.Function node as string""" decorate = node.decorators.accept(self) if node.decorators else '' docs = self._docs_dedent(node.doc) if node.doc else '' trailer = ':' if node.returns: return_annotation = '->' + node.returns.as_string() trailer = return_annotation + ":" def_format = "\n%sdef %s(%s)%s%s\n%s" return def_format % (decorate, node.name, node.args.accept(self), trailer, docs, self._stmt_list(node.body)) def visit_generatorexp(self, node): """return an astroid.GeneratorExp node as string""" return '(%s %s)' % (node.elt.accept(self), ' '.join([n.accept(self) for n in node.generators])) def visit_attribute(self, node): """return an astroid.Getattr node as string""" return '%s.%s' % (self._precedence_parens(node, node.expr), node.attrname) def visit_global(self, node): """return an astroid.Global node as string""" return 'global %s' % ', '.join(node.names) def visit_if(self, node): """return an astroid.If node as string""" ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))] if node.has_elif_block(): ifs.append('el%s' % self._stmt_list(node.orelse, indent=False)) elif node.orelse: ifs.append('else:\n%s' % self._stmt_list(node.orelse)) return '\n'.join(ifs) def visit_ifexp(self, node): """return an astroid.IfExp node as string""" return '%s if %s else %s' % ( self._precedence_parens(node, node.body, is_left=True), self._precedence_parens(node, node.test, is_left=True), self._precedence_parens(node, node.orelse, is_left=False)) def visit_import(self, node): """return an astroid.Import node as string""" return 'import %s' % _import_string(node.names) def visit_keyword(self, node): """return an astroid.Keyword node as string""" if node.arg is None: return '**%s' % node.value.accept(self) return '%s=%s' % (node.arg, node.value.accept(self)) def visit_lambda(self, node): """return an astroid.Lambda node as string""" args = node.args.accept(self) body = node.body.accept(self) if args: return 'lambda %s: %s' % (args, body) return 'lambda: %s' % body def visit_list(self, node): """return an astroid.List node as string""" return '[%s]' % ', '.join([child.accept(self) for child in node.elts]) def visit_listcomp(self, node): """return an astroid.ListComp node as string""" return '[%s %s]' % (node.elt.accept(self), ' '.join([n.accept(self) for n in node.generators])) def visit_module(self, node): """return an astroid.Module node as string""" docs = '"""%s"""\n\n' % node.doc if node.doc else '' return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n' def visit_name(self, node): """return an astroid.Name node as string""" return node.name def visit_pass(self, node): """return an astroid.Pass node as string""" return 'pass' def visit_print(self, node): """return an astroid.Print node as string""" nodes = ', '.join([n.accept(self) for n in node.values]) if not node.nl: nodes = '%s,' % nodes if node.dest: return 'print >> %s, %s' % (node.dest.accept(self), nodes) return 'print %s' % nodes def visit_raise(self, node): """return an astroid.Raise node as string""" if node.exc: if node.inst: if node.tback: return 'raise %s, %s, %s' % (node.exc.accept(self), node.inst.accept(self), node.tback.accept(self)) return 'raise %s, %s' % (node.exc.accept(self), node.inst.accept(self)) return 'raise %s' % node.exc.accept(self) return 'raise' def visit_return(self, node): """return an astroid.Return node as string""" if node.is_tuple_return() and len(node.value.elts) 
> 1: elts = [child.accept(self) for child in node.value.elts] return 'return %s' % ', '.join(elts) if node.value: return 'return %s' % node.value.accept(self) return 'return' def visit_index(self, node): """return a astroid.Index node as string""" return node.value.accept(self) def visit_set(self, node): """return an astroid.Set node as string""" return '{%s}' % ', '.join([child.accept(self) for child in node.elts]) def visit_setcomp(self, node): """return an astroid.SetComp node as string""" return '{%s %s}' % (node.elt.accept(self), ' '.join([n.accept(self) for n in node.generators])) def visit_slice(self, node): """return a astroid.Slice node as string""" lower = node.lower.accept(self) if node.lower else '' upper = node.upper.accept(self) if node.upper else '' step = node.step.accept(self) if node.step else '' if step: return '%s:%s:%s' % (lower, upper, step) return '%s:%s' % (lower, upper) def visit_subscript(self, node): """return an astroid.Subscript node as string""" return '%s[%s]' % (self._precedence_parens(node, node.value), node.slice.accept(self)) def visit_tryexcept(self, node): """return an astroid.TryExcept node as string""" trys = ['try:\n%s' % self._stmt_list(node.body)] for handler in node.handlers: trys.append(handler.accept(self)) if node.orelse: trys.append('else:\n%s' % self._stmt_list(node.orelse)) return '\n'.join(trys) def visit_tryfinally(self, node): """return an astroid.TryFinally node as string""" return 'try:\n%s\nfinally:\n%s' % (self._stmt_list(node.body), self._stmt_list(node.finalbody)) def visit_tuple(self, node): """return an astroid.Tuple node as string""" if len(node.elts) == 1: return '(%s, )' % node.elts[0].accept(self) return '(%s)' % ', '.join([child.accept(self) for child in node.elts]) def visit_unaryop(self, node): """return an astroid.UnaryOp node as string""" if node.op == 'not': operator = 'not ' else: operator = node.op return '%s%s' % (operator, self._precedence_parens(node, node.operand)) def visit_while(self, node): """return an astroid.While node as string""" whiles = 'while %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body)) if node.orelse: whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse)) return whiles def visit_with(self, node): # 'with' without 'as' is possible """return an astroid.With node as string""" items = ', '.join(('%s' % expr.accept(self)) + (vars and ' as %s' % (vars.accept(self)) or '') for expr, vars in node.items) return 'with %s:\n%s' % (items, self._stmt_list(node.body)) def visit_yield(self, node): """yield an ast.Yield node as string""" yi_val = (" " + node.value.accept(self)) if node.value else "" expr = 'yield' + yi_val if node.parent.is_statement: return expr return "(%s)" % (expr,) def visit_starred(self, node): """return Starred node as string""" return "*" + node.value.accept(self) # These aren't for real AST nodes, but for inference objects. 
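# (FrozenSet and Super are defined in astroid.objects and Uninferable in
# astroid.util; as the methods below show, the first two render through their
# parent node while Uninferable falls back to its str() form.)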
def visit_frozenset(self, node): return node.parent.accept(self) def visit_super(self, node): return node.parent.accept(self) def visit_uninferable(self, node): return str(node) class AsStringVisitor3(AsStringVisitor): """AsStringVisitor3 overwrites some AsStringVisitor methods""" def visit_excepthandler(self, node): if node.type: if node.name: excs = 'except %s as %s' % (node.type.accept(self), node.name.accept(self)) else: excs = 'except %s' % node.type.accept(self) else: excs = 'except' return '%s:\n%s' % (excs, self._stmt_list(node.body)) def visit_nonlocal(self, node): """return an astroid.Nonlocal node as string""" return 'nonlocal %s' % ', '.join(node.names) def visit_raise(self, node): """return an astroid.Raise node as string""" if node.exc: if node.cause: return 'raise %s from %s' % (node.exc.accept(self), node.cause.accept(self)) return 'raise %s' % node.exc.accept(self) return 'raise' def visit_yieldfrom(self, node): """ Return an astroid.YieldFrom node as string. """ yi_val = (" " + node.value.accept(self)) if node.value else "" expr = 'yield from' + yi_val if node.parent.is_statement: return expr return "(%s)" % (expr,) def visit_asyncfunctiondef(self, node): function = super(AsStringVisitor3, self).visit_functiondef(node) return 'async ' + function.strip() def visit_await(self, node): return 'await %s' % node.value.accept(self) def visit_asyncwith(self, node): return 'async %s' % self.visit_with(node) def visit_asyncfor(self, node): return 'async %s' % self.visit_for(node) def visit_joinedstr(self, node): # Special treatment for constants, # as we want to join literals not reprs string = ''.join( value.value if type(value).__name__ == 'Const' else value.accept(self) for value in node.values ) return "f'%s'" % string def visit_formattedvalue(self, node): return '{%s}' % node.value.accept(self) def visit_comprehension(self, node): """return an astroid.Comprehension node as string""" return '%s%s' % ('async ' if node.is_async else '', super(AsStringVisitor3, self).visit_comprehension(node)) def _import_string(names): """return a list of (name, asname) formatted as a string""" _names = [] for name, asname in names: if asname is not None: _names.append('%s as %s' % (name, asname)) else: _names.append(name) return ', '.join(_names) if sys.version_info >= (3, 0): AsStringVisitor = AsStringVisitor3 # This sets the default indent to 4 spaces. to_code = AsStringVisitor(' ') astroid-2.0.1/astroid/bases.py0000644000076500000240000004411613324063433017053 0ustar claudiustaff00000000000000# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2012 FELD Boris # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2016-2017 Derek Gustafson # Copyright (c) 2017 Calen Pennington # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 Daniel Colascione # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """This module contains base classes and functions for the nodes and some inference utils. 
""" import builtins import collections import sys from astroid import context as contextmod from astroid import exceptions from astroid import util objectmodel = util.lazy_import('interpreter.objectmodel') helpers = util.lazy_import('helpers') BUILTINS = builtins.__name__ manager = util.lazy_import('manager') MANAGER = manager.AstroidManager() if sys.version_info >= (3, 0): # TODO: check if needs special treatment BUILTINS = 'builtins' BOOL_SPECIAL_METHOD = '__bool__' else: BUILTINS = '__builtin__' BOOL_SPECIAL_METHOD = '__nonzero__' PROPERTIES = {BUILTINS + '.property', 'abc.abstractproperty'} # List of possible property names. We use this list in order # to see if a method is a property or not. This should be # pretty reliable and fast, the alternative being to check each # decorator to see if its a real property-like descriptor, which # can be too complicated. # Also, these aren't qualified, because each project can # define them, we shouldn't expect to know every possible # property-like decorator! POSSIBLE_PROPERTIES = {"cached_property", "cachedproperty", "lazyproperty", "lazy_property", "reify", "lazyattribute", "lazy_attribute", "LazyProperty", "lazy", "cache_readonly"} def _is_property(meth): if PROPERTIES.intersection(meth.decoratornames()): return True stripped = {name.split(".")[-1] for name in meth.decoratornames() if name is not util.Uninferable} if any(name in stripped for name in POSSIBLE_PROPERTIES): return True # Lookup for subclasses of *property* if not meth.decorators: return False for decorator in meth.decorators.nodes or (): inferred = helpers.safe_infer(decorator) if inferred is None or inferred is util.Uninferable: continue if inferred.__class__.__name__ == 'ClassDef': for base_class in inferred.bases: module, _ = base_class.lookup(base_class.name) if module.name == BUILTINS and base_class.name == 'property': return True return False class Proxy: """a simple proxy object Note: Subclasses of this object will need a custom __getattr__ if new instance attributes are created. See the Const class """ _proxied = None # proxied object may be set by class or by instance def __init__(self, proxied=None): if proxied is not None: self._proxied = proxied def __getattr__(self, name): if name == '_proxied': return getattr(self.__class__, '_proxied') if name in self.__dict__: return self.__dict__[name] return getattr(self._proxied, name) def infer(self, context=None): yield self def _infer_stmts(stmts, context, frame=None): """Return an iterator on statements inferred by each statement in *stmts*.""" stmt = None inferred = False if context is not None: name = context.lookupname context = context.clone() else: name = None context = contextmod.InferenceContext() for stmt in stmts: if stmt is util.Uninferable: yield stmt inferred = True continue context.lookupname = stmt._infer_name(frame, name) try: for inferred in stmt.infer(context=context): yield inferred inferred = True except exceptions.NameInferenceError: continue except exceptions.InferenceError: yield util.Uninferable inferred = True if not inferred: raise exceptions.InferenceError( 'Inference failed for all members of {stmts!r}.', stmts=stmts, frame=frame, context=context) def _infer_method_result_truth(instance, method_name, context): # Get the method from the instance and try to infer # its return's truth value. 
meth = next(instance.igetattr(method_name, context=context), None) if meth and hasattr(meth, 'infer_call_result'): if not meth.callable(): return util.Uninferable for value in meth.infer_call_result(instance, context=context): if value is util.Uninferable: return value inferred = next(value.infer(context=context)) return inferred.bool_value() return util.Uninferable class BaseInstance(Proxy): """An instance base class, which provides lookup methods for potential instances.""" special_attributes = None def display_type(self): return 'Instance of' def getattr(self, name, context=None, lookupclass=True): try: values = self._proxied.instance_attr(name, context) except exceptions.AttributeInferenceError as exc: if self.special_attributes and name in self.special_attributes: return [self.special_attributes.lookup(name)] if lookupclass: # Class attributes not available through the instance # unless they are explicitly defined. return self._proxied.getattr(name, context, class_context=False) raise exceptions.AttributeInferenceError( target=self, attribute=name, context=context, ) from exc # since we've no context information, return matching class members as # well if lookupclass: try: return values + self._proxied.getattr(name, context, class_context=False) except exceptions.AttributeInferenceError: pass return values def igetattr(self, name, context=None): """inferred getattr""" if not context: context = contextmod.InferenceContext() try: # avoid recursively inferring the same attr on the same class if context.push((self._proxied, name)): return # XXX frame should be self._proxied, or not ? get_attr = self.getattr(name, context, lookupclass=False) yield from _infer_stmts(self._wrap_attr(get_attr, context), context, frame=self) except exceptions.AttributeInferenceError as error: try: # fallback to class.igetattr since it has some logic to handle # descriptors # But only if the _proxied is the Class. if self._proxied.__class__.__name__ != 'ClassDef': raise exceptions.InferenceError(**vars(error)) from error attrs = self._proxied.igetattr(name, context, class_context=False) yield from self._wrap_attr(attrs, context) except exceptions.AttributeInferenceError as error: raise exceptions.InferenceError(**vars(error)) from error def _wrap_attr(self, attrs, context=None): """wrap bound methods of attrs in a InstanceMethod proxies""" for attr in attrs: if isinstance(attr, UnboundMethod): if _is_property(attr): yield from attr.infer_call_result(self, context) else: yield BoundMethod(attr, self) elif hasattr(attr, 'name') and attr.name == '': # This is a lambda function defined at class level, # since its scope is the underlying _proxied class. # Unfortunately, we can't do an isinstance check here, # because of the circular dependency between astroid.bases # and astroid.scoped_nodes. 
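# For example, in ``class A: f = lambda self: 1`` looking up ``f`` on an A
# instance should behave like a bound method, which is what the 'self'
# first-argument check below arranges.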
if attr.statement().scope() == self._proxied: if attr.args.args and attr.args.args[0].name == 'self': yield BoundMethod(attr, self) continue yield attr else: yield attr def infer_call_result(self, caller, context=None): """infer what a class instance is returning when called""" context = contextmod.bind_context_to_node(context, self) inferred = False for node in self._proxied.igetattr('__call__', context): if node is util.Uninferable or not node.callable(): continue for res in node.infer_call_result(caller, context): inferred = True yield res if not inferred: raise exceptions.InferenceError(node=self, caller=caller, context=context) class Instance(BaseInstance): """A special node representing a class instance.""" # pylint: disable=unnecessary-lambda special_attributes = util.lazy_descriptor(lambda: objectmodel.InstanceModel()) def __repr__(self): return '' % (self._proxied.root().name, self._proxied.name, id(self)) def __str__(self): return 'Instance of %s.%s' % (self._proxied.root().name, self._proxied.name) def callable(self): try: self._proxied.getattr('__call__', class_context=False) return True except exceptions.AttributeInferenceError: return False def pytype(self): return self._proxied.qname() def display_type(self): return 'Instance of' def bool_value(self): """Infer the truth value for an Instance The truth value of an instance is determined by these conditions: * if it implements __bool__ on Python 3 or __nonzero__ on Python 2, then its bool value will be determined by calling this special method and checking its result. * when this method is not defined, __len__() is called, if it is defined, and the object is considered true if its result is nonzero. If a class defines neither __len__() nor __bool__(), all its instances are considered true. """ context = contextmod.InferenceContext() context.callcontext = contextmod.CallContext(args=[]) context.boundnode = self try: result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context) except (exceptions.InferenceError, exceptions.AttributeInferenceError): # Fallback to __len__. try: result = _infer_method_result_truth(self, '__len__', context) except (exceptions.AttributeInferenceError, exceptions.InferenceError): return True return result # This is set in inference.py. def getitem(self, index, context=None): pass class UnboundMethod(Proxy): """a special node representing a method not bound to an instance""" # pylint: disable=unnecessary-lambda special_attributes = util.lazy_descriptor(lambda: objectmodel.UnboundMethodModel()) def __repr__(self): frame = self._proxied.parent.frame() return '<%s %s of %s at 0x%s' % (self.__class__.__name__, self._proxied.name, frame.qname(), id(self)) def implicit_parameters(self): return 0 def is_bound(self): return False def getattr(self, name, context=None): if name in self.special_attributes: return [self.special_attributes.lookup(name)] return self._proxied.getattr(name, context) def igetattr(self, name, context=None): if name in self.special_attributes: return iter((self.special_attributes.lookup(name), )) return self._proxied.igetattr(name, context) def infer_call_result(self, caller, context): """ The boundnode of the regular context with a function called on ``object.__new__`` will be of type ``object``, which is incorrect for the argument in general. 
If no context is given the ``object.__new__`` call argument will correctly inferred except when inside a call that requires the additonal context (such as a classmethod) of the boundnode to determine which class the method was called from """ # If we're unbound method __new__ of builtin object, the result is an # instance of the class given as first argument. if (self._proxied.name == '__new__' and self._proxied.parent.frame().qname() == '%s.object' % BUILTINS): if caller.args: node_context = context.extra_context.get(caller.args[0]) infer = caller.args[0].infer(context=node_context) else: infer = [] return (Instance(x) if x is not util.Uninferable else x for x in infer) return self._proxied.infer_call_result(caller, context) def bool_value(self): return True class BoundMethod(UnboundMethod): """a special node representing a method bound to an instance""" # pylint: disable=unnecessary-lambda special_attributes = util.lazy_descriptor(lambda: objectmodel.BoundMethodModel()) def __init__(self, proxy, bound): UnboundMethod.__init__(self, proxy) self.bound = bound def implicit_parameters(self): return 1 def is_bound(self): return True def _infer_type_new_call(self, caller, context): """Try to infer what type.__new__(mcs, name, bases, attrs) returns. In order for such call to be valid, the metaclass needs to be a subtype of ``type``, the name needs to be a string, the bases needs to be a tuple of classes """ from astroid import node_classes # Verify the metaclass mcs = next(caller.args[0].infer(context=context)) if mcs.__class__.__name__ != 'ClassDef': # Not a valid first argument. return None if not mcs.is_subtype_of("%s.type" % BUILTINS): # Not a valid metaclass. return None # Verify the name name = next(caller.args[1].infer(context=context)) if name.__class__.__name__ != 'Const': # Not a valid name, needs to be a const. return None if not isinstance(name.value, str): # Needs to be a string. return None # Verify the bases bases = next(caller.args[2].infer(context=context)) if bases.__class__.__name__ != 'Tuple': # Needs to be a tuple. return None inferred_bases = [next(elt.infer(context=context)) for elt in bases.elts] if any(base.__class__.__name__ != 'ClassDef' for base in inferred_bases): # All the bases needs to be Classes return None # Verify the attributes. attrs = next(caller.args[3].infer(context=context)) if attrs.__class__.__name__ != 'Dict': # Needs to be a dictionary. return None cls_locals = collections.defaultdict(list) for key, value in attrs.items: key = next(key.infer(context=context)) value = next(value.infer(context=context)) # Ignore non string keys if (key.__class__.__name__ == 'Const' and isinstance(key.value, str)): cls_locals[key.value].append(value) # Build the class from now. cls = mcs.__class__(name=name.value, lineno=caller.lineno, col_offset=caller.col_offset, parent=caller) empty = node_classes.Pass() cls.postinit(bases=bases.elts, body=[empty], decorators=[], newstyle=True, metaclass=mcs, keywords=[]) cls.locals = cls_locals return cls def infer_call_result(self, caller, context=None): context = contextmod.bind_context_to_node(context, self.bound) if (self.bound.__class__.__name__ == 'ClassDef' and self.bound.name == 'type' and self.name == '__new__' and len(caller.args) == 4): # Check if we have an ``type.__new__(mcs, name, bases, attrs)`` call. 
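# For example, ``type.__new__(mcs, 'A', (object,), {'x': 1})`` is rebuilt by
# _infer_type_new_call above into a ClassDef named 'A' whose locals contain 'x'.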
new_cls = self._infer_type_new_call(caller, context) if new_cls: return iter((new_cls, )) return super(BoundMethod, self).infer_call_result(caller, context) def bool_value(self): return True class Generator(BaseInstance): """a special node representing a generator. Proxied class is set once for all in raw_building. """ # pylint: disable=unnecessary-lambda special_attributes = util.lazy_descriptor(lambda: objectmodel.GeneratorModel()) # pylint: disable=super-init-not-called def __init__(self, parent=None): self.parent = parent def callable(self): return False def pytype(self): return '%s.generator' % BUILTINS def display_type(self): return 'Generator' def bool_value(self): return True def __repr__(self): return '' % (self._proxied.name, self.lineno, id(self)) def __str__(self): return 'Generator(%s)' % (self._proxied.name) astroid-2.0.1/astroid/brain/0000755000076500000240000000000013324065077016477 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/brain/brain_attrs.py0000644000076500000240000000356513324063433021364 0ustar claudiustaff00000000000000# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """ Astroid hook for the attrs library Without this hook pylint reports unsupported-assignment-operation for atrrs classes """ import astroid from astroid import MANAGER ATTR_IB = 'attr.ib' def is_decorated_with_attrs( node, decorator_names=('attr.s', 'attr.attrs', 'attr.attributes')): """Return True if a decorated node has an attr decorator applied.""" if not node.decorators: return False for decorator_attribute in node.decorators.nodes: if isinstance(decorator_attribute, astroid.Call): # decorator with arguments decorator_attribute = decorator_attribute.func if decorator_attribute.as_string() in decorator_names: return True return False def attr_attributes_transform(node): """Given that the ClassNode has an attr decorator, rewrite class attributes as instance attributes """ # Astroid can't infer this attribute properly # Prevents https://github.com/PyCQA/pylint/issues/1884 node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)] for cdefbodynode in node.body: if not isinstance(cdefbodynode, astroid.Assign): continue if isinstance(cdefbodynode.value, astroid.Call): if cdefbodynode.value.func.as_string() != ATTR_IB: continue else: continue for target in cdefbodynode.targets: rhs_node = astroid.Unknown( lineno=cdefbodynode.lineno, col_offset=cdefbodynode.col_offset, parent=cdefbodynode ) node.locals[target.name] = [rhs_node] MANAGER.register_transform( astroid.ClassDef, attr_attributes_transform, is_decorated_with_attrs) astroid-2.0.1/astroid/brain/brain_builtin_inference.py0000644000076500000240000006524413324063433023715 0ustar claudiustaff00000000000000# Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014-2015 LOGILAB S.A. 
(Paris, FRANCE) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Rene Zhang # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for various builtins.""" from functools import partial import sys from textwrap import dedent import six from astroid import ( MANAGER, Instance, UseInferenceDefault, AttributeInferenceError, inference_tip, InferenceError, NameInferenceError, AstroidTypeError, MroError, ) from astroid import arguments from astroid.builder import AstroidBuilder from astroid import helpers from astroid import nodes from astroid import objects from astroid import scoped_nodes from astroid import util OBJECT_DUNDER_NEW = 'object.__new__' def _extend_str(class_node, rvalue): """function to extend builtin str/unicode class""" code = dedent(''' class whatever(object): def join(self, iterable): return {rvalue} def replace(self, old, new, count=None): return {rvalue} def format(self, *args, **kwargs): return {rvalue} def encode(self, encoding='ascii', errors=None): return '' def decode(self, encoding='ascii', errors=None): return u'' def capitalize(self): return {rvalue} def title(self): return {rvalue} def lower(self): return {rvalue} def upper(self): return {rvalue} def swapcase(self): return {rvalue} def index(self, sub, start=None, end=None): return 0 def find(self, sub, start=None, end=None): return 0 def count(self, sub, start=None, end=None): return 0 def strip(self, chars=None): return {rvalue} def lstrip(self, chars=None): return {rvalue} def rstrip(self, chars=None): return {rvalue} def rjust(self, width, fillchar=None): return {rvalue} def center(self, width, fillchar=None): return {rvalue} def ljust(self, width, fillchar=None): return {rvalue} ''') code = code.format(rvalue=rvalue) fake = AstroidBuilder(MANAGER).string_build(code)['whatever'] for method in fake.mymethods(): method.parent = class_node method.lineno = None method.col_offset = None if '__class__' in method.locals: method.locals['__class__'] = [class_node] class_node.locals[method.name] = [method] method.parent = class_node def extend_builtins(class_transforms): from astroid.bases import BUILTINS builtin_ast = MANAGER.astroid_cache[BUILTINS] for class_name, transform in class_transforms.items(): transform(builtin_ast[class_name]) if sys.version_info > (3, 0): extend_builtins({'bytes': partial(_extend_str, rvalue="b''"), 'str': partial(_extend_str, rvalue="''")}) else: extend_builtins({'str': partial(_extend_str, rvalue="''"), 'unicode': partial(_extend_str, rvalue="u''")}) def _builtin_filter_predicate(node, builtin_name): if isinstance(node.func, nodes.Name) and node.func.name == builtin_name: return True if isinstance(node.func, nodes.Attribute): return (node.func.attrname == 'fromkeys' and isinstance(node.func.expr, nodes.Name) and node.func.expr.name == 'dict') return False def register_builtin_transform(transform, builtin_name): """Register a new transform function for the given *builtin_name*. The transform function must accept two parameters, a node and an optional context. """ def _transform_wrapper(node, context=None): result = transform(node, context=context) if result: if not result.parent: # Let the transformation function determine # the parent for its result. Otherwise, # we set it to be the node we transformed from. 
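# For example, a nodes.List produced by a transform for a ``list(...)`` call
# gets the original Call node as its parent and inherits that node's line and
# column below, so error reporting still points at the call site.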
result.parent = node if result.lineno is None: result.lineno = node.lineno if result.col_offset is None: result.col_offset = node.col_offset return iter([result]) MANAGER.register_transform( nodes.Call, inference_tip(_transform_wrapper), partial(_builtin_filter_predicate, builtin_name=builtin_name), ) def _generic_inference(node, context, node_type, transform): args = node.args if not args: return node_type() if len(node.args) > 1: raise UseInferenceDefault() arg, = args transformed = transform(arg) if not transformed: try: inferred = next(arg.infer(context=context)) except (InferenceError, StopIteration): raise UseInferenceDefault() if inferred is util.Uninferable: raise UseInferenceDefault() transformed = transform(inferred) if not transformed or transformed is util.Uninferable: raise UseInferenceDefault() return transformed def _generic_transform(arg, klass, iterables, build_elts): if isinstance(arg, klass): return arg elif isinstance(arg, iterables): if not all(isinstance(elt, nodes.Const) for elt in arg.elts): raise UseInferenceDefault() elts = [elt.value for elt in arg.elts] elif isinstance(arg, nodes.Dict): if not all(isinstance(elt[0], nodes.Const) for elt in arg.items): raise UseInferenceDefault() elts = [item[0].value for item in arg.items] elif (isinstance(arg, nodes.Const) and isinstance(arg.value, (six.string_types, six.binary_type))): elts = arg.value else: return return klass.from_constants(elts=build_elts(elts)) def _infer_builtin(node, context, klass=None, iterables=None, build_elts=None): transform_func = partial( _generic_transform, klass=klass, iterables=iterables, build_elts=build_elts) return _generic_inference(node, context, klass, transform_func) # pylint: disable=invalid-name infer_tuple = partial( _infer_builtin, klass=nodes.Tuple, iterables=(nodes.List, nodes.Set, objects.FrozenSet, objects.DictItems, objects.DictKeys, objects.DictValues), build_elts=tuple) infer_list = partial( _infer_builtin, klass=nodes.List, iterables=(nodes.Tuple, nodes.Set, objects.FrozenSet, objects.DictItems, objects.DictKeys, objects.DictValues), build_elts=list) infer_set = partial( _infer_builtin, klass=nodes.Set, iterables=(nodes.List, nodes.Tuple, objects.FrozenSet, objects.DictKeys), build_elts=set) infer_frozenset = partial( _infer_builtin, klass=objects.FrozenSet, iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet, objects.DictKeys), build_elts=frozenset) def _get_elts(arg, context): is_iterable = lambda n: isinstance(n, (nodes.List, nodes.Tuple, nodes.Set)) try: inferred = next(arg.infer(context)) except (InferenceError, NameInferenceError): raise UseInferenceDefault() if isinstance(inferred, nodes.Dict): items = inferred.items elif is_iterable(inferred): items = [] for elt in inferred.elts: # If an item is not a pair of two items, # then fallback to the default inference. # Also, take in consideration only hashable items, # tuples and consts. We are choosing Names as well. if not is_iterable(elt): raise UseInferenceDefault() if len(elt.elts) != 2: raise UseInferenceDefault() if not isinstance(elt.elts[0], (nodes.Tuple, nodes.Const, nodes.Name)): raise UseInferenceDefault() items.append(tuple(elt.elts)) else: raise UseInferenceDefault() return items def infer_dict(node, context=None): """Try to infer a dict call to a Dict node. The function treats the following cases: * dict() * dict(mapping) * dict(iterable) * dict(iterable, **kwargs) * dict(mapping, **kwargs) * dict(**kwargs) If a case can't be inferred, we'll fallback to default inference. 
""" call = arguments.CallSite.from_call(node) if call.has_invalid_arguments() or call.has_invalid_keywords(): raise UseInferenceDefault args = call.positional_arguments kwargs = list(call.keyword_arguments.items()) if not args and not kwargs: # dict() return nodes.Dict() elif kwargs and not args: # dict(a=1, b=2, c=4) items = [(nodes.Const(key), value) for key, value in kwargs] elif len(args) == 1 and kwargs: # dict(some_iterable, b=2, c=4) elts = _get_elts(args[0], context) keys = [(nodes.Const(key), value) for key, value in kwargs] items = elts + keys elif len(args) == 1: items = _get_elts(args[0], context) else: raise UseInferenceDefault() value = nodes.Dict(col_offset=node.col_offset, lineno=node.lineno, parent=node.parent) value.postinit(items) return value def infer_super(node, context=None): """Understand super calls. There are some restrictions for what can be understood: * unbounded super (one argument form) is not understood. * if the super call is not inside a function (classmethod or method), then the default inference will be used. * if the super arguments can't be inferred, the default inference will be used. """ if len(node.args) == 1: # Ignore unbounded super. raise UseInferenceDefault scope = node.scope() if not isinstance(scope, nodes.FunctionDef): # Ignore non-method uses of super. raise UseInferenceDefault if scope.type not in ('classmethod', 'method'): # Not interested in staticmethods. raise UseInferenceDefault cls = scoped_nodes.get_wrapping_class(scope) if not len(node.args): mro_pointer = cls # In we are in a classmethod, the interpreter will fill # automatically the class as the second argument, not an instance. if scope.type == 'classmethod': mro_type = cls else: mro_type = cls.instantiate_class() else: try: mro_pointer = next(node.args[0].infer(context=context)) except InferenceError: raise UseInferenceDefault try: mro_type = next(node.args[1].infer(context=context)) except InferenceError: raise UseInferenceDefault if mro_pointer is util.Uninferable or mro_type is util.Uninferable: # No way we could understand this. raise UseInferenceDefault super_obj = objects.Super(mro_pointer=mro_pointer, mro_type=mro_type, self_class=cls, scope=scope) super_obj.parent = node return super_obj def _infer_getattr_args(node, context): if len(node.args) not in (2, 3): # Not a valid getattr call. raise UseInferenceDefault try: obj = next(node.args[0].infer(context=context)) attr = next(node.args[1].infer(context=context)) except InferenceError: raise UseInferenceDefault if obj is util.Uninferable or attr is util.Uninferable: # If one of the arguments is something we can't infer, # then also make the result of the getattr call something # which is unknown. return util.Uninferable, util.Uninferable is_string = (isinstance(attr, nodes.Const) and isinstance(attr.value, six.string_types)) if not is_string: raise UseInferenceDefault return obj, attr.value def infer_getattr(node, context=None): """Understand getattr calls If one of the arguments is an Uninferable object, then the result will be an Uninferable object. Otherwise, the normal attribute lookup will be done. """ obj, attr = _infer_getattr_args(node, context) if obj is util.Uninferable or attr is util.Uninferable or not hasattr(obj, 'igetattr'): return util.Uninferable try: return next(obj.igetattr(attr, context=context)) except (StopIteration, InferenceError, AttributeInferenceError): if len(node.args) == 3: # Try to infer the default and return it instead. 
try: return next(node.args[2].infer(context=context)) except InferenceError: raise UseInferenceDefault raise UseInferenceDefault def infer_hasattr(node, context=None): """Understand hasattr calls This always guarantees three possible outcomes for calling hasattr: Const(False) when we are sure that the object doesn't have the intended attribute, Const(True) when we know that the object has the attribute and Uninferable when we are unsure of the outcome of the function call. """ try: obj, attr = _infer_getattr_args(node, context) if obj is util.Uninferable or attr is util.Uninferable or not hasattr(obj, 'getattr'): return util.Uninferable obj.getattr(attr, context=context) except UseInferenceDefault: # Can't infer something from this function call. return util.Uninferable except AttributeInferenceError: # Doesn't have it. return nodes.Const(False) return nodes.Const(True) def infer_callable(node, context=None): """Understand callable calls This follows Python's semantics, where an object is callable if it provides an attribute __call__, even though that attribute is something which can't be called. """ if len(node.args) != 1: # Invalid callable call. raise UseInferenceDefault argument = node.args[0] try: inferred = next(argument.infer(context=context)) except InferenceError: return util.Uninferable if inferred is util.Uninferable: return util.Uninferable return nodes.Const(inferred.callable()) def infer_bool(node, context=None): """Understand bool calls.""" if len(node.args) > 1: # Invalid bool call. raise UseInferenceDefault if not node.args: return nodes.Const(False) argument = node.args[0] try: inferred = next(argument.infer(context=context)) except InferenceError: return util.Uninferable if inferred is util.Uninferable: return util.Uninferable bool_value = inferred.bool_value() if bool_value is util.Uninferable: return util.Uninferable return nodes.Const(bool_value) def infer_type(node, context=None): """Understand the one-argument form of *type*.""" if len(node.args) != 1: raise UseInferenceDefault return helpers.object_type(node.args[0], context) def infer_slice(node, context=None): """Understand `slice` calls.""" args = node.args if not 0 < len(args) <= 3: raise UseInferenceDefault args = list(map(helpers.safe_infer, args)) for arg in args: if not arg or arg is util.Uninferable: raise UseInferenceDefault if not isinstance(arg, nodes.Const): raise UseInferenceDefault if not isinstance(arg.value, (type(None), int)): raise UseInferenceDefault if len(args) < 3: # Make sure we have 3 arguments. 
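# For example, slice(2) reaches this point with a single inferred Const
# argument; the padding below brings the argument list up to the three
# values that are handed to nodes.Slice.postinit() just after.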
args.extend([None] * (3 - len(args))) slice_node = nodes.Slice(lineno=node.lineno, col_offset=node.col_offset, parent=node.parent) slice_node.postinit(*args) return slice_node def _infer_object__new__decorator(node, context=None): # Instantiate class immediately # since that's what @object.__new__ does return iter((node.instantiate_class(),)) def _infer_object__new__decorator_check(node): """Predicate before inference_tip Check if the given ClassDef has a @object.__new__ decorator """ if not node.decorators: return False for decorator in node.decorators.nodes: if isinstance(decorator, nodes.Attribute): if decorator.as_string() == OBJECT_DUNDER_NEW: return True return False def infer_issubclass(callnode, context=None): """Infer issubclass() calls :param nodes.Call callnode: a `issubclass` call :param InferenceContext: the context for the inference :rtype nodes.Const: Boolean Const value of the `issubclass` call :raises UseInferenceDefault: If the node cannot be inferred """ call = arguments.CallSite.from_call(callnode) if call.keyword_arguments: # issubclass doesn't support keyword arguments raise UseInferenceDefault("TypeError: issubclass() takes no keyword arguments") if len(call.positional_arguments) != 2: raise UseInferenceDefault( "Expected two arguments, got {count}" .format(count=len(call.positional_arguments))) # The left hand argument is the obj to be checked obj_node, class_or_tuple_node = call.positional_arguments try: obj_type = next(obj_node.infer(context=context)) except InferenceError as exc: raise UseInferenceDefault from exc if not isinstance(obj_type, nodes.ClassDef): raise UseInferenceDefault("TypeError: arg 1 must be class") # The right hand argument is the class(es) that the given # object is to be checked against. try: class_container = _class_or_tuple_to_container( class_or_tuple_node, context=context) except InferenceError as exc: raise UseInferenceDefault from exc try: issubclass_bool = helpers.object_issubclass(obj_type, class_container, context) except AstroidTypeError as exc: raise UseInferenceDefault("TypeError: " + str(exc)) from exc except MroError as exc: raise UseInferenceDefault from exc return nodes.Const(issubclass_bool) def infer_isinstance(callnode, context=None): """Infer isinstance calls :param nodes.Call callnode: an isinstance call :param InferenceContext: context for call (currently unused but is a common interface for inference) :rtype nodes.Const: Boolean Const value of isinstance call :raises UseInferenceDefault: If the node cannot be inferred """ call = arguments.CallSite.from_call(callnode) if call.keyword_arguments: # isinstance doesn't support keyword arguments raise UseInferenceDefault("TypeError: isinstance() takes no keyword arguments") if len(call.positional_arguments) != 2: raise UseInferenceDefault( "Expected two arguments, got {count}" .format(count=len(call.positional_arguments))) # The left hand argument is the obj to be checked obj_node, class_or_tuple_node = call.positional_arguments # The right hand argument is the class(es) that the given # obj is to be check is an instance of try: class_container = _class_or_tuple_to_container( class_or_tuple_node, context=context) except InferenceError: raise UseInferenceDefault try: isinstance_bool = helpers.object_isinstance( obj_node, class_container, context) except AstroidTypeError as exc: raise UseInferenceDefault("TypeError: " + str(exc)) except MroError as exc: raise UseInferenceDefault from exc if isinstance_bool is util.Uninferable: raise UseInferenceDefault return 
nodes.Const(isinstance_bool) def _class_or_tuple_to_container(node, context=None): # Move inferences results into container # to simplify later logic # raises InferenceError if any of the inferences fall through node_infer = next(node.infer(context=context)) # arg2 MUST be a type or a TUPLE of types # for isinstance if isinstance(node_infer, nodes.Tuple): class_container = [ next(node.infer(context=context)) for node in node_infer.elts ] class_container = [ klass_node for klass_node in class_container if klass_node is not None ] else: class_container = [node_infer] return class_container def infer_len(node, context=None): """Infer length calls :param nodes.Call node: len call to infer :param context.InferenceContext: node context :rtype nodes.Const: a Const node with the inferred length, if possible """ call = arguments.CallSite.from_call(node) if call.keyword_arguments: raise UseInferenceDefault( "TypeError: len() must take no keyword arguments" ) if len(call.positional_arguments) != 1: raise UseInferenceDefault( "TypeError: len() must take exactly one argument " "({len}) given".format(len=len(call.positional_arguments)) ) [argument_node] = call.positional_arguments try: return nodes.Const(helpers.object_len(argument_node)) except (AstroidTypeError, InferenceError) as exc: raise UseInferenceDefault(str(exc)) from exc def infer_str(node, context=None): """Infer str() calls :param nodes.Call node: str() call to infer :param context.InferenceContext: node context :rtype nodes.Const: a Const containing an empty string """ call = arguments.CallSite.from_call(node) if call.keyword_arguments: raise UseInferenceDefault( "TypeError: str() must take no keyword arguments") try: return nodes.Const('') except (AstroidTypeError, InferenceError) as exc: raise UseInferenceDefault(str(exc)) from exc def infer_int(node, context=None): """Infer int() calls :param nodes.Call node: int() call to infer :param context.InferenceContext: node context :rtype nodes.Const: a Const containing the integer value of the int() call """ call = arguments.CallSite.from_call(node) if call.keyword_arguments: raise UseInferenceDefault( "TypeError: int() must take no keyword arguments" ) if call.positional_arguments: try: first_value = next(call.positional_arguments[0].infer(context=context)) except InferenceError as exc: raise UseInferenceDefault(str(exc)) from exc if first_value is util.Uninferable: raise UseInferenceDefault if isinstance(first_value, nodes.Const) and isinstance(first_value.value, (int, str)): try: actual_value = int(first_value.value) except ValueError: return nodes.Const(0) return nodes.Const(actual_value) return nodes.Const(0) def infer_dict_fromkeys(node, context=None): """Infer dict.fromkeys :param nodes.Call node: dict.fromkeys() call to infer :param context.InferenceContext: node context :rtype nodes.Dict: a Dictionary containing the values that astroid was able to infer. In case the inference failed for any reason, an empty dictionary will be inferred instead. 
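
    Illustrative sketch (assuming the public helpers extract_node and
    infer; not part of this function's contract):

        node = extract_node("dict.fromkeys('ab')")
        inferred = next(node.infer())   # Dict mapping 'a' and 'b' to None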
""" def _build_dict_with_elements(elements): new_node = nodes.Dict(col_offset=node.col_offset, lineno=node.lineno, parent=node.parent) new_node.postinit(elements) return new_node call = arguments.CallSite.from_call(node) if call.keyword_arguments: raise UseInferenceDefault( "TypeError: int() must take no keyword arguments" ) if len(call.positional_arguments) not in {1, 2}: raise UseInferenceDefault("TypeError: Needs between 1 and 2 positional arguments") default = nodes.Const(None) values = call.positional_arguments[0] try: inferred_values = next(values.infer(context=context)) except InferenceError: return _build_dict_with_elements([]) if inferred_values is util.Uninferable: return _build_dict_with_elements([]) # Limit to a couple of potential values, as this can become pretty complicated accepted_iterable_elements = ( nodes.Const, ) if isinstance(inferred_values, (nodes.List, nodes.Set, nodes.Tuple)): elements = inferred_values.elts for element in elements: if not isinstance(element, accepted_iterable_elements): # Fallback to an empty dict return _build_dict_with_elements([]) elements_with_value = [(element, default) for element in elements] return _build_dict_with_elements(elements_with_value) elif (isinstance(inferred_values, nodes.Const) and isinstance(inferred_values.value, (str, bytes))): elements = [ (nodes.Const(element), default) for element in inferred_values.value ] return _build_dict_with_elements(elements) elif isinstance(inferred_values, nodes.Dict): keys = inferred_values.itered() for key in keys: if not isinstance(key, accepted_iterable_elements): # Fallback to an empty dict return _build_dict_with_elements([]) elements_with_value = [(element, default) for element in keys] return _build_dict_with_elements(elements_with_value) # Fallback to an empty dictionary return _build_dict_with_elements([]) # Builtins inference register_builtin_transform(infer_bool, 'bool') register_builtin_transform(infer_super, 'super') register_builtin_transform(infer_callable, 'callable') register_builtin_transform(infer_getattr, 'getattr') register_builtin_transform(infer_hasattr, 'hasattr') register_builtin_transform(infer_tuple, 'tuple') register_builtin_transform(infer_set, 'set') register_builtin_transform(infer_list, 'list') register_builtin_transform(infer_dict, 'dict') register_builtin_transform(infer_frozenset, 'frozenset') register_builtin_transform(infer_type, 'type') register_builtin_transform(infer_slice, 'slice') register_builtin_transform(infer_isinstance, 'isinstance') register_builtin_transform(infer_issubclass, 'issubclass') register_builtin_transform(infer_len, 'len') register_builtin_transform(infer_str, 'str') register_builtin_transform(infer_int, 'int') register_builtin_transform(infer_dict_fromkeys, 'dict.fromkeys') # Infer object.__new__ calls MANAGER.register_transform( nodes.ClassDef, inference_tip(_infer_object__new__decorator), _infer_object__new__decorator_check ) astroid-2.0.1/astroid/brain/brain_collections.py0000644000076500000240000000535613324063433022545 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2016, 2018 Claudiu Popa # Copyright (c) 2016-2017 Łukasz Rogalski # Copyright (c) 2017 Derek Gustafson # Copyright (c) 2018 Ioana Tagirta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import sys import astroid PY34 = sys.version_info >= (3, 4) PY35 = sys.version_info >= (3, 5) def _collections_transform(): return 
astroid.parse(''' class defaultdict(dict): default_factory = None def __missing__(self, key): pass def __getitem__(self, key): return default_factory ''' + _deque_mock() + _ordered_dict_mock()) def _deque_mock(): base_deque_class = ''' class deque(object): maxlen = 0 def __init__(self, iterable=None, maxlen=None): self.iterable = iterable or [] def append(self, x): pass def appendleft(self, x): pass def clear(self): pass def count(self, x): return 0 def extend(self, iterable): pass def extendleft(self, iterable): pass def pop(self): return self.iterable[0] def popleft(self): return self.iterable[0] def remove(self, value): pass def reverse(self): return reversed(self.iterable) def rotate(self, n=1): return self def __iter__(self): return self def __reversed__(self): return self.iterable[::-1] def __getitem__(self, index): return self.iterable[index] def __setitem__(self, index, value): pass def __delitem__(self, index): pass def __bool__(self): return bool(self.iterable) def __nonzero__(self): return bool(self.iterable) def __contains__(self, o): return o in self.iterable def __len__(self): return len(self.iterable) def __copy__(self): return deque(self.iterable)''' if PY35: base_deque_class += ''' def copy(self): return deque(self.iterable) def index(self, x, start=0, end=0): return 0 def insert(self, x, i): pass def __add__(self, other): pass def __iadd__(self, other): pass def __mul__(self, other): pass def __imul__(self, other): pass def __rmul__(self, other): pass''' return base_deque_class def _ordered_dict_mock(): base_ordered_dict_class = ''' class OrderedDict(dict): def __reversed__(self): return self[::-1] ''' if PY34: base_ordered_dict_class += ''' def move_to_end(self, key, last=False): pass''' return base_ordered_dict_class astroid.register_module_extender(astroid.MANAGER, 'collections', _collections_transform) astroid-2.0.1/astroid/brain/brain_curses.py0000644000076500000240000000633113324063433021525 0ustar claudiustaff00000000000000# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import astroid def _curses_transform(): return astroid.parse(''' A_ALTCHARSET = 1 A_BLINK = 1 A_BOLD = 1 A_DIM = 1 A_INVIS = 1 A_ITALIC = 1 A_NORMAL = 1 A_PROTECT = 1 A_REVERSE = 1 A_STANDOUT = 1 A_UNDERLINE = 1 A_HORIZONTAL = 1 A_LEFT = 1 A_LOW = 1 A_RIGHT = 1 A_TOP = 1 A_VERTICAL = 1 A_CHARTEXT = 1 A_ATTRIBUTES = 1 A_CHARTEXT = 1 A_COLOR = 1 KEY_MIN = 1 KEY_BREAK = 1 KEY_DOWN = 1 KEY_UP = 1 KEY_LEFT = 1 KEY_RIGHT = 1 KEY_HOME = 1 KEY_BACKSPACE = 1 KEY_F0 = 1 KEY_Fn = 1 KEY_DL = 1 KEY_IL = 1 KEY_DC = 1 KEY_IC = 1 KEY_EIC = 1 KEY_CLEAR = 1 KEY_EOS = 1 KEY_EOL = 1 KEY_SF = 1 KEY_SR = 1 KEY_NPAGE = 1 KEY_PPAGE = 1 KEY_STAB = 1 KEY_CTAB = 1 KEY_CATAB = 1 KEY_ENTER = 1 KEY_SRESET = 1 KEY_RESET = 1 KEY_PRINT = 1 KEY_LL = 1 KEY_A1 = 1 KEY_A3 = 1 KEY_B2 = 1 KEY_C1 = 1 KEY_C3 = 1 KEY_BTAB = 1 KEY_BEG = 1 KEY_CANCEL = 1 KEY_CLOSE = 1 KEY_COMMAND = 1 KEY_COPY = 1 KEY_CREATE = 1 KEY_END = 1 KEY_EXIT = 1 KEY_FIND = 1 KEY_HELP = 1 KEY_MARK = 1 KEY_MESSAGE = 1 KEY_MOVE = 1 KEY_NEXT = 1 KEY_OPEN = 1 KEY_OPTIONS = 1 KEY_PREVIOUS = 1 KEY_REDO = 1 KEY_REFERENCE = 1 KEY_REFRESH = 1 KEY_REPLACE = 1 KEY_RESTART = 1 KEY_RESUME = 1 KEY_SAVE = 1 KEY_SBEG = 1 KEY_SCANCEL = 1 KEY_SCOMMAND = 1 KEY_SCOPY = 1 KEY_SCREATE = 1 KEY_SDC = 1 KEY_SDL = 1 KEY_SELECT = 1 KEY_SEND = 1 KEY_SEOL = 1 KEY_SEXIT = 1 KEY_SFIND = 1 KEY_SHELP = 1 KEY_SHOME = 1 KEY_SIC = 1 KEY_SLEFT = 1 KEY_SMESSAGE = 1 KEY_SMOVE = 1 KEY_SNEXT = 1 
KEY_SOPTIONS = 1 KEY_SPREVIOUS = 1 KEY_SPRINT = 1 KEY_SREDO = 1 KEY_SREPLACE = 1 KEY_SRIGHT = 1 KEY_SRSUME = 1 KEY_SSAVE = 1 KEY_SSUSPEND = 1 KEY_SUNDO = 1 KEY_SUSPEND = 1 KEY_UNDO = 1 KEY_MOUSE = 1 KEY_RESIZE = 1 KEY_MAX = 1 ACS_BBSS = 1 ACS_BLOCK = 1 ACS_BOARD = 1 ACS_BSBS = 1 ACS_BSSB = 1 ACS_BSSS = 1 ACS_BTEE = 1 ACS_BULLET = 1 ACS_CKBOARD = 1 ACS_DARROW = 1 ACS_DEGREE = 1 ACS_DIAMOND = 1 ACS_GEQUAL = 1 ACS_HLINE = 1 ACS_LANTERN = 1 ACS_LARROW = 1 ACS_LEQUAL = 1 ACS_LLCORNER = 1 ACS_LRCORNER = 1 ACS_LTEE = 1 ACS_NEQUAL = 1 ACS_PI = 1 ACS_PLMINUS = 1 ACS_PLUS = 1 ACS_RARROW = 1 ACS_RTEE = 1 ACS_S1 = 1 ACS_S3 = 1 ACS_S7 = 1 ACS_S9 = 1 ACS_SBBS = 1 ACS_SBSB = 1 ACS_SBSS = 1 ACS_SSBB = 1 ACS_SSBS = 1 ACS_SSSB = 1 ACS_SSSS = 1 ACS_STERLING = 1 ACS_TTEE = 1 ACS_UARROW = 1 ACS_ULCORNER = 1 ACS_URCORNER = 1 ACS_VLINE = 1 COLOR_BLACK = 1 COLOR_BLUE = 1 COLOR_CYAN = 1 COLOR_GREEN = 1 COLOR_MAGENTA = 1 COLOR_RED = 1 COLOR_WHITE = 1 COLOR_YELLOW = 1 ''') astroid.register_module_extender(astroid.MANAGER, 'curses', _curses_transform) astroid-2.0.1/astroid/brain/brain_dateutil.py0000644000076500000240000000136513324063433022036 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016 Claudiu Popa # Copyright (c) 2015 raylu # Copyright (c) 2016 Ceridwen # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for dateutil""" import textwrap from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder def dateutil_transform(): return AstroidBuilder(MANAGER).string_build(textwrap.dedent(''' import datetime def parse(timestr, parserinfo=None, **kwargs): return datetime.datetime() ''')) register_module_extender(MANAGER, 'dateutil.parser', dateutil_transform) astroid-2.0.1/astroid/brain/brain_fstrings.py0000644000076500000240000000365313324063433022064 0ustar claudiustaff00000000000000# Copyright (c) 2017 Claudiu Popa # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import sys import astroid def _clone_node_with_lineno(node, parent, lineno): cls = node.__class__ other_fields = node._other_fields _astroid_fields = node._astroid_fields init_params = { 'lineno': lineno, 'col_offset': node.col_offset, 'parent': parent } postinit_params = { param: getattr(node, param) for param in _astroid_fields } if other_fields: init_params.update({ param: getattr(node, param) for param in other_fields }) new_node = cls(**init_params) if hasattr(node, 'postinit') and _astroid_fields: new_node.postinit(**postinit_params) return new_node def _transform_formatted_value(node): if node.value and node.value.lineno == 1: if node.lineno != node.value.lineno: new_node = astroid.FormattedValue( lineno=node.lineno, col_offset=node.col_offset, parent=node.parent ) new_value = _clone_node_with_lineno( node=node.value, lineno=node.lineno, parent=new_node ) new_node.postinit(value=new_value, format_spec=node.format_spec) return new_node if sys.version_info[:2] >= (3, 6): # TODO: this fix tries to *patch* http://bugs.python.org/issue29051 # The problem is that FormattedValue.value, which is a Name node, # has wrong line numbers, usually 1. This creates problems for pylint, # which expects correct line numbers for things such as message control. 
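# A hedged illustration of the effect: in a module like
#
#     x = 1
#     y = 2
#     msg = f"{x} and {y}"
#
# the inner Name nodes of each FormattedValue otherwise report line 1;
# the transform registered below clones them with the f-string's own
# line number so consumers that rely on lineno see the right location.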
astroid.MANAGER.register_transform( astroid.FormattedValue, _transform_formatted_value) astroid-2.0.1/astroid/brain/brain_functools.py0000644000076500000240000001425213324063433022236 0ustar claudiustaff00000000000000# Copyright (c) 2016, 2018 Claudiu Popa # Copyright (c) 2018 Bryce Guinta """Astroid hooks for understanding functools library module.""" from itertools import chain import astroid from astroid import arguments from astroid import BoundMethod from astroid import extract_node from astroid import helpers from astroid.interpreter import objectmodel from astroid import MANAGER LRU_CACHE = 'functools.lru_cache' class LruWrappedModel(objectmodel.FunctionModel): """Special attribute model for functions decorated with functools.lru_cache. The said decorators patches at decoration time some functions onto the decorated function. """ @property def py__wrapped__(self): return self._instance @property def pycache_info(self): cache_info = extract_node(''' from functools import _CacheInfo _CacheInfo(0, 0, 0, 0) ''') class CacheInfoBoundMethod(BoundMethod): def infer_call_result(self, caller, context=None): yield helpers.safe_infer(cache_info) return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance) @property def pycache_clear(self): node = extract_node('''def cache_clear(self): pass''') return BoundMethod(proxy=node, bound=self._instance.parent.scope()) def _transform_lru_cache(node, context=None): # TODO: this is not ideal, since the node should be immutable, # but due to https://github.com/PyCQA/astroid/issues/354, # there's not much we can do now. # Replacing the node would work partially, because, # in pylint, the old node would still be available, leading # to spurious false positives. node.special_attributes = LruWrappedModel()(node) return def _functools_partial_inference(node, context=None): call = arguments.CallSite.from_call(node) number_of_positional = len(call.positional_arguments) if number_of_positional < 1: raise astroid.UseInferenceDefault('functools.partial takes at least one argument') if number_of_positional == 1 and not call.keyword_arguments: raise astroid.UseInferenceDefault( 'functools.partial needs at least to have some filled arguments' ) partial_function = call.positional_arguments[0] try: inferred_wrapped_function = next(partial_function.infer(context=context)) except astroid.InferenceError as exc: raise astroid.UseInferenceDefault from exc if inferred_wrapped_function is astroid.Uninferable: raise astroid.UseInferenceDefault('Cannot infer the wrapped function') if not isinstance(inferred_wrapped_function, astroid.FunctionDef): raise astroid.UseInferenceDefault('The wrapped function is not a function') # Determine if the passed keywords into the callsite are supported # by the wrapped function. 
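# Hedged example of a call that passes this check:
#     from functools import partial
#     def add(a, b): return a + b
#     add_one = partial(add, 1)      # 'a' filled positionally, 'b' left open
# whereas partial(add, c=3) supplies an unknown keyword and falls back to
# the default inference below.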
function_parameters = chain( inferred_wrapped_function.args.args or (), inferred_wrapped_function.args.kwonlyargs or () ) parameter_names = set( param.name for param in function_parameters if isinstance(param, astroid.AssignName) ) if set(call.keyword_arguments) - parameter_names: raise astroid.UseInferenceDefault('wrapped function received unknown parameters') # Return a wrapped() object that can be used further for inference class PartialFunction(astroid.FunctionDef): filled_positionals = len(call.positional_arguments[1:]) filled_keywords = list(call.keyword_arguments) def infer_call_result(self, caller=None, context=None): nonlocal call filled_args = call.positional_arguments[1:] filled_keywords = call.keyword_arguments if context: current_passed_keywords = { keyword for (keyword, _) in context.callcontext.keywords } for keyword, value in filled_keywords.items(): if keyword not in current_passed_keywords: context.callcontext.keywords.append((keyword, value)) call_context_args = context.callcontext.args or [] context.callcontext.args = filled_args + call_context_args return super().infer_call_result( caller=caller, context=context, ) partial_function = PartialFunction( name=inferred_wrapped_function.name, doc=inferred_wrapped_function.doc, lineno=inferred_wrapped_function.lineno, col_offset=inferred_wrapped_function.col_offset, parent=inferred_wrapped_function.parent, ) partial_function.postinit( args=inferred_wrapped_function.args, body=inferred_wrapped_function.body, decorators=inferred_wrapped_function.decorators, returns=inferred_wrapped_function.returns, type_comment_returns=inferred_wrapped_function.type_comment_returns, type_comment_args=inferred_wrapped_function.type_comment_args, ) return iter((partial_function,)) def _looks_like_lru_cache(node): """Check if the given function node is decorated with lru_cache.""" if not node.decorators: return False for decorator in node.decorators.nodes: if not isinstance(decorator, astroid.Call): continue func = helpers.safe_infer(decorator.func) if func in (None, astroid.Uninferable): continue if isinstance(func, astroid.FunctionDef) and func.qname() == LRU_CACHE: return True return False def _looks_like_functools_partial(node): """Check if the given Call node is a functools.partial call""" if isinstance(node.func, astroid.Name): return node.func.name == 'partial' elif isinstance(node.func, astroid.Attribute): return (node.func.attrname == 'partial' and isinstance(node.func.expr, astroid.Name) and node.func.expr.name == 'functools') MANAGER.register_transform( astroid.FunctionDef, _transform_lru_cache, _looks_like_lru_cache, ) MANAGER.register_transform( astroid.Call, astroid.inference_tip(_functools_partial_inference), _looks_like_functools_partial, ) astroid-2.0.1/astroid/brain/brain_gi.py0000644000076500000240000001507013324063433020620 0ustar claudiustaff00000000000000# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014 Google, Inc. # Copyright (c) 2014 Cole Robinson # Copyright (c) 2015-2016 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 David Shea # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2016 Giuseppe Scrivano # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for the Python 2 GObject introspection bindings. 
Helps with understanding everything imported from 'gi.repository' """ import inspect import itertools import sys import re import warnings from astroid import MANAGER, AstroidBuildingError, nodes from astroid.builder import AstroidBuilder _inspected_modules = {} _identifier_re = r'^[A-Za-z_]\w*$' def _gi_build_stub(parent): """ Inspect the passed module recursively and build stubs for functions, classes, etc. """ classes = {} functions = {} constants = {} methods = {} for name in dir(parent): if name.startswith("__"): continue # Check if this is a valid name in python if not re.match(_identifier_re, name): continue try: obj = getattr(parent, name) except: continue if inspect.isclass(obj): classes[name] = obj elif (inspect.isfunction(obj) or inspect.isbuiltin(obj)): functions[name] = obj elif (inspect.ismethod(obj) or inspect.ismethoddescriptor(obj)): methods[name] = obj elif (str(obj).startswith(", ) # Only accept function calls with two constant arguments if len(node.args) != 2: return False if not all(isinstance(arg, nodes.Const) for arg in node.args): return False func = node.func if isinstance(func, nodes.Attribute): if func.attrname != 'require_version': return False if isinstance(func.expr, nodes.Name) and func.expr.name == 'gi': return True return False if isinstance(func, nodes.Name): return func.name == 'require_version' return False def _register_require_version(node): # Load the gi.require_version locally try: import gi gi.require_version(node.args[0].value, node.args[1].value) except Exception: pass return node MANAGER.register_failed_import_hook(_import_gi_module) MANAGER.register_transform(nodes.Call, _register_require_version, _looks_like_require_version) astroid-2.0.1/astroid/brain/brain_hashlib.py0000644000076500000240000000245313324063433021634 0ustar claudiustaff00000000000000# Copyright (c) 2016, 2018 Claudiu Popa # Copyright (c) 2018 Ioana Tagirta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import sys import six import astroid PY36 = sys.version_info >= (3, 6) def _hashlib_transform(): template = ''' class %(name)s(object): def __init__(self, value=''): pass def digest(self): return %(digest)s def copy(self): return self def update(self, value): pass def hexdigest(self): return '' @property def name(self): return %(name)r @property def block_size(self): return 1 @property def digest_size(self): return 1 ''' algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'] if PY36: algorithms += [ 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', 'shake_128', 'shake_256', 'blake2b', ] classes = "".join( template % {'name': hashfunc, 'digest': 'b""' if six.PY3 else '""'} for hashfunc in algorithms) return astroid.parse(classes) astroid.register_module_extender(astroid.MANAGER, 'hashlib', _hashlib_transform) astroid-2.0.1/astroid/brain/brain_io.py0000644000076500000240000000306513324063433020631 0ustar claudiustaff00000000000000# Copyright (c) 2016 Claudiu Popa # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER '''Astroid brain hints for some of the _io C objects.''' import astroid BUFFERED = {'BufferedWriter', 'BufferedReader'} TextIOWrapper = 'TextIOWrapper' FileIO = 'FileIO' BufferedWriter = 'BufferedWriter' def _generic_io_transform(node, name, cls): '''Transform the given name, by adding the given *class* as a member of the node.''' io_module 
= astroid.MANAGER.ast_from_module_name('_io') attribute_object = io_module[cls] instance = attribute_object.instantiate_class() node.locals[name] = [instance] def _transform_text_io_wrapper(node): # This is not always correct, since it can vary with the type of the descriptor, # being stdout, stderr or stdin. But we cannot get access to the name of the # stream, which is why we are using the BufferedWriter class as a default # value return _generic_io_transform(node, name='buffer', cls=BufferedWriter) def _transform_buffered(node): return _generic_io_transform(node, name='raw', cls=FileIO) astroid.MANAGER.register_transform(astroid.ClassDef, _transform_buffered, lambda node: node.name in BUFFERED) astroid.MANAGER.register_transform(astroid.ClassDef, _transform_text_io_wrapper, lambda node: node.name == TextIOWrapper) astroid-2.0.1/astroid/brain/brain_mechanize.py0000644000076500000240000000155213324063433022164 0ustar claudiustaff00000000000000# Copyright (c) 2012-2013 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014 Google, Inc. # Copyright (c) 2015-2016 Claudiu Popa # Copyright (c) 2016 Ceridwen # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder def mechanize_transform(): return AstroidBuilder(MANAGER).string_build(''' class Browser(object): def open(self, url, data=None, timeout=None): return None def open_novisit(self, url, data=None, timeout=None): return None def open_local_file(self, filename): return None ''') register_module_extender(MANAGER, 'mechanize', mechanize_transform) astroid-2.0.1/astroid/brain/brain_multiprocessing.py0000644000076500000240000000607013324063433023450 0ustar claudiustaff00000000000000# Copyright (c) 2016 Claudiu Popa # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import sys import astroid from astroid import exceptions PY34 = sys.version_info >= (3, 4) def _multiprocessing_transform(): module = astroid.parse(''' from multiprocessing.managers import SyncManager def Manager(): return SyncManager() ''') if not PY34: return module # On Python 3.4, multiprocessing uses a getattr lookup inside contexts, # in order to get the attributes they need. Since it's extremely # dynamic, we use this approach to fake it. node = astroid.parse(''' from multiprocessing.context import DefaultContext, BaseContext default = DefaultContext() base = BaseContext() ''') try: context = next(node['default'].infer()) base = next(node['base'].infer()) except exceptions.InferenceError: return module for node in (context, base): for key, value in node.locals.items(): if key.startswith("_"): continue value = value[0] if isinstance(value, astroid.FunctionDef): # We need to rebound this, since otherwise # it will have an extra argument (self). 
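# A sketch of the intent: after this rebinding, an access such as
#     import multiprocessing
#     multiprocessing.Queue()
# is inferred as a call on a context instance rather than on a plain
# function that would still expect an explicit 'self'.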
value = astroid.BoundMethod(value, node) module[key] = value return module def _multiprocessing_managers_transform(): return astroid.parse(''' import array import threading import multiprocessing.pool as pool import six class Namespace(object): pass class Value(object): def __init__(self, typecode, value, lock=True): self._typecode = typecode self._value = value def get(self): return self._value def set(self, value): self._value = value def __repr__(self): return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value) value = property(get, set) def Array(typecode, sequence, lock=True): return array.array(typecode, sequence) class SyncManager(object): Queue = JoinableQueue = six.moves.queue.Queue Event = threading.Event RLock = threading.RLock BoundedSemaphore = threading.BoundedSemaphore Condition = threading.Condition Barrier = threading.Barrier Pool = pool.Pool list = list dict = dict Value = Value Array = Array Namespace = Namespace __enter__ = lambda self: self __exit__ = lambda *args: args def start(self, initializer=None, initargs=None): pass def shutdown(self): pass ''') astroid.register_module_extender(astroid.MANAGER, 'multiprocessing.managers', _multiprocessing_managers_transform) astroid.register_module_extender(astroid.MANAGER, 'multiprocessing', _multiprocessing_transform) astroid-2.0.1/astroid/brain/brain_namedtuple_enum.py0000644000076500000240000003322613324063433023406 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2013-2014 Google, Inc. # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Dmitry Pribysh # Copyright (c) 2015 David Shea # Copyright (c) 2015 Philip Lorenz # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2016 Mateusz Bysiek # Copyright (c) 2017 Hugo # Copyright (c) 2017 Łukasz Rogalski # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for the Python standard library.""" import functools import keyword from textwrap import dedent from astroid import ( MANAGER, UseInferenceDefault, inference_tip, InferenceError) from astroid import arguments from astroid import exceptions from astroid import nodes from astroid.builder import AstroidBuilder, extract_node from astroid import util TYPING_NAMEDTUPLE_BASENAMES = { 'NamedTuple', 'typing.NamedTuple' } ENUM_BASE_NAMES = {'Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'} def _infer_first(node, context): if node is util.Uninferable: raise UseInferenceDefault try: value = next(node.infer(context=context)) if value is util.Uninferable: raise UseInferenceDefault() else: return value except StopIteration: raise InferenceError() def _find_func_form_arguments(node, context): def _extract_namedtuple_arg_or_keyword(position, key_name=None): if len(args) > position: return _infer_first(args[position], context) if key_name and key_name in found_keywords: return _infer_first(found_keywords[key_name], context) args = node.args keywords = node.keywords found_keywords = { keyword.arg: keyword.value for keyword in keywords } if keywords else {} name = _extract_namedtuple_arg_or_keyword( position=0, key_name='typename' ) names = _extract_namedtuple_arg_or_keyword( position=1, key_name='field_names' ) if name and names: return name.value, names raise UseInferenceDefault() def infer_func_form(node, base_type, context=None, enum=False): """Specific 
inference function for namedtuple or Python 3 enum. """ # node is a Call node, class name as first argument and generated class # attributes as second argument # namedtuple or enums list of attributes can be a list of strings or a # whitespace-separate string try: name, names = _find_func_form_arguments(node, context) try: attributes = names.value.replace(',', ' ').split() except AttributeError: if not enum: attributes = [_infer_first(const, context).value for const in names.elts] else: # Enums supports either iterator of (name, value) pairs # or mappings. if hasattr(names, 'items') and isinstance(names.items, list): attributes = [_infer_first(const[0], context).value for const in names.items if isinstance(const[0], nodes.Const)] elif hasattr(names, 'elts'): # Enums can support either ["a", "b", "c"] # or [("a", 1), ("b", 2), ...], but they can't # be mixed. if all(isinstance(const, nodes.Tuple) for const in names.elts): attributes = [_infer_first(const.elts[0], context).value for const in names.elts if isinstance(const, nodes.Tuple)] else: attributes = [_infer_first(const, context).value for const in names.elts] else: raise AttributeError if not attributes: raise AttributeError except (AttributeError, exceptions.InferenceError): raise UseInferenceDefault() # If we can't infer the name of the class, don't crash, up to this point # we know it is a namedtuple anyway. name = name or 'Uninferable' # we want to return a Class node instance with proper attributes set class_node = nodes.ClassDef(name, 'docstring') class_node.parent = node.parent # set base class=tuple class_node.bases.append(base_type) # XXX add __init__(*attributes) method for attr in attributes: fake_node = nodes.EmptyNode() fake_node.parent = class_node fake_node.attrname = attr class_node.instance_attrs[attr] = [fake_node] return class_node, name, attributes def _has_namedtuple_base(node): """Predicate for class inference tip :type node: ClassDef :rtype: bool """ return set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES def _looks_like(node, name): func = node.func if isinstance(func, nodes.Attribute): return func.attrname == name if isinstance(func, nodes.Name): return func.name == name return False _looks_like_namedtuple = functools.partial(_looks_like, name='namedtuple') _looks_like_enum = functools.partial(_looks_like, name='Enum') _looks_like_typing_namedtuple = functools.partial(_looks_like, name='NamedTuple') def infer_named_tuple(node, context=None): """Specific inference function for namedtuple Call node""" tuple_base_name = nodes.Name(name='tuple', parent=node.root()) class_node, name, attributes = infer_func_form( node, tuple_base_name, context=context, ) call_site = arguments.CallSite.from_call(node) func = next(extract_node('import collections; collections.namedtuple').infer()) try: rename = next(call_site.infer_argument(func, 'rename', context)).bool_value() except InferenceError: rename = False if rename: attributes = _get_renamed_namedtuple_atributes(attributes) replace_args = ', '.join( '{arg}=None'.format(arg=arg) for arg in attributes ) field_def = (" {name} = property(lambda self: self[{index:d}], " "doc='Alias for field number {index:d}')") field_defs = '\n'.join(field_def.format(name=name, index=index) for index, name in enumerate(attributes)) fake = AstroidBuilder(MANAGER).string_build(''' class %(name)s(tuple): __slots__ = () _fields = %(fields)r def _asdict(self): return self.__dict__ @classmethod def _make(cls, iterable, new=tuple.__new__, len=len): return new(cls, iterable) def _replace(self, 
%(replace_args)s): return self def __getnewargs__(self): return tuple(self) %(field_defs)s ''' % {'name': name, 'fields': attributes, 'field_defs': field_defs, 'replace_args': replace_args}) class_node.locals['_asdict'] = fake.body[0].locals['_asdict'] class_node.locals['_make'] = fake.body[0].locals['_make'] class_node.locals['_replace'] = fake.body[0].locals['_replace'] class_node.locals['_fields'] = fake.body[0].locals['_fields'] for attr in attributes: class_node.locals[attr] = fake.body[0].locals[attr] # we use UseInferenceDefault, we can't be a generator so return an iterator return iter([class_node]) def _get_renamed_namedtuple_atributes(field_names): names = list(field_names) seen = set() for i, name in enumerate(field_names): if (not all(c.isalnum() or c == '_' for c in name) or keyword.iskeyword(name) or not name or name[0].isdigit() or name.startswith('_') or name in seen): names[i] = '_%d' % i seen.add(name) return tuple(names) def infer_enum(node, context=None): """ Specific inference function for enum Call node. """ enum_meta = extract_node(''' class EnumMeta(object): 'docstring' def __call__(self, node): class EnumAttribute(object): name = '' value = 0 return EnumAttribute() def __iter__(self): class EnumAttribute(object): name = '' value = 0 return [EnumAttribute()] def __next__(self): return next(iter(self)) def __getitem__(self, attr): class Value(object): @property def name(self): return '' @property def value(self): return attr return Value() __members__ = [''] ''') class_node = infer_func_form(node, enum_meta, context=context, enum=True)[0] return iter([class_node.instantiate_class()]) def infer_enum_class(node): """ Specific inference for enums. """ for basename in node.basenames: # TODO: doesn't handle subclasses yet. This implementation # is a hack to support enums. if basename not in ENUM_BASE_NAMES: continue if node.root().name == 'enum': # Skip if the class is directly from enum module. break for local, values in node.locals.items(): if any(not isinstance(value, nodes.AssignName) for value in values): continue targets = [] stmt = values[0].statement() if isinstance(stmt, nodes.Assign): if isinstance(stmt.targets[0], nodes.Tuple): targets = stmt.targets[0].itered() else: targets = stmt.targets elif isinstance(stmt, nodes.AnnAssign): targets = [stmt.target] inferred_return_value = None if isinstance(stmt.value, nodes.Const): if isinstance(stmt.value.value, str): inferred_return_value = '"{}"'.format(stmt.value.value) else: inferred_return_value = stmt.value.value new_targets = [] for target in targets: # Replace all the assignments with our mocked class. 
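# Hedged illustration: for an enum such as
#
#     class Color(Enum):
#         RED = 1
#
# the RED assignment is replaced below by an instance of a small generated
# class whose 'value' property yields the inferred constant (1 here) and
# which also exposes a 'name' property, approximating real Enum members.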
classdef = dedent(''' class {name}({types}): @property def value(self): return {return_value} @property def name(self): return {name} '''.format( name=target.name, types=', '.join(node.basenames), return_value=inferred_return_value, )) fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name] fake.parent = target.parent for method in node.mymethods(): fake.locals[method.name] = [method] new_targets.append(fake.instantiate_class()) node.locals[local] = new_targets break return node def infer_typing_namedtuple_class(class_node, context=None): """Infer a subclass of typing.NamedTuple""" # Check if it has the corresponding bases annassigns_fields = [ annassign.target.name for annassign in class_node.body if isinstance(annassign, nodes.AnnAssign) ] code = dedent(''' from collections import namedtuple namedtuple({typename!r}, {fields!r}) ''').format( typename=class_node.name, fields=",".join(annassigns_fields) ) node = extract_node(code) generated_class_node = next(infer_named_tuple(node, context)) for method in class_node.mymethods(): generated_class_node.locals[method.name] = [method] return iter((generated_class_node, )) def infer_typing_namedtuple(node, context=None): """Infer a typing.NamedTuple(...) call.""" # This is essentially a namedtuple with different arguments # so we extract the args and infer a named tuple. try: func = next(node.func.infer()) except InferenceError: raise UseInferenceDefault if func.qname() != 'typing.NamedTuple': raise UseInferenceDefault if len(node.args) != 2: raise UseInferenceDefault if not isinstance(node.args[1], (nodes.List, nodes.Tuple)): raise UseInferenceDefault names = [] for elt in node.args[1].elts: if not isinstance(elt, (nodes.List, nodes.Tuple)): raise UseInferenceDefault if len(elt.elts) != 2: raise UseInferenceDefault names.append(elt.elts[0].as_string()) typename = node.args[0].as_string() node = extract_node('namedtuple(%(typename)s, (%(fields)s,)) ' % {'typename': typename, 'fields': ",".join(names)}) return infer_named_tuple(node, context) MANAGER.register_transform( nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple, ) MANAGER.register_transform( nodes.Call, inference_tip(infer_enum), _looks_like_enum, ) MANAGER.register_transform( nodes.ClassDef, infer_enum_class, predicate=lambda cls: any(basename for basename in cls.basenames if basename in ENUM_BASE_NAMES) ) MANAGER.register_transform( nodes.ClassDef, inference_tip(infer_typing_namedtuple_class), _has_namedtuple_base, ) MANAGER.register_transform( nodes.Call, inference_tip(infer_typing_namedtuple), _looks_like_typing_namedtuple, ) astroid-2.0.1/astroid/brain/brain_nose.py0000644000076500000240000000426713324063433021173 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016 Claudiu Popa # Copyright (c) 2016 Ceridwen # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Hooks for nose library.""" import re import textwrap import astroid import astroid.builder _BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER) def _pep8(name, caps=re.compile('([A-Z])')): return caps.sub(lambda m: '_' + m.groups()[0].lower(), name) def _nose_tools_functions(): """Get an iterator of names and bound methods.""" module = _BUILDER.string_build(textwrap.dedent(''' import unittest class Test(unittest.TestCase): pass a = Test() ''')) try: case = next(module['a'].infer()) except astroid.InferenceError: return for method in case.methods(): if 
method.name.startswith('assert') and '_' not in method.name: pep8_name = _pep8(method.name) yield pep8_name, astroid.BoundMethod(method, case) if method.name == 'assertEqual': # nose also exports assert_equals. yield 'assert_equals', astroid.BoundMethod(method, case) def _nose_tools_transform(node): for method_name, method in _nose_tools_functions(): node.locals[method_name] = [method] def _nose_tools_trivial_transform(): """Custom transform for the nose.tools module.""" stub = _BUILDER.string_build('''__all__ = []''') all_entries = ['ok_', 'eq_'] for pep8_name, method in _nose_tools_functions(): all_entries.append(pep8_name) stub[pep8_name] = method # Update the __all__ variable, since nose.tools # does this manually with .append. all_assign = stub['__all__'].parent all_object = astroid.List(all_entries) all_object.parent = all_assign all_assign.value = all_object return stub astroid.register_module_extender(astroid.MANAGER, 'nose.tools.trivial', _nose_tools_trivial_transform) astroid.MANAGER.register_transform(astroid.Module, _nose_tools_transform, lambda n: n.name == 'nose.tools') astroid-2.0.1/astroid/brain/brain_numpy.py0000644000076500000240000003471713324063433021402 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016, 2018 Claudiu Popa # Copyright (c) 2016 Ceridwen # Copyright (c) 2017-2018 hippo91 # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for numpy.""" import astroid def numpy_random_mtrand_transform(): return astroid.parse(''' def beta(a, b, size=None): pass def binomial(n, p, size=None): pass def bytes(length): pass def chisquare(df, size=None): pass def choice(a, size=None, replace=True, p=None): pass def dirichlet(alpha, size=None): pass def exponential(scale=1.0, size=None): pass def f(dfnum, dfden, size=None): pass def gamma(shape, scale=1.0, size=None): pass def geometric(p, size=None): pass def get_state(): pass def gumbel(loc=0.0, scale=1.0, size=None): pass def hypergeometric(ngood, nbad, nsample, size=None): pass def laplace(loc=0.0, scale=1.0, size=None): pass def logistic(loc=0.0, scale=1.0, size=None): pass def lognormal(mean=0.0, sigma=1.0, size=None): pass def logseries(p, size=None): pass def multinomial(n, pvals, size=None): pass def multivariate_normal(mean, cov, size=None): pass def negative_binomial(n, p, size=None): pass def noncentral_chisquare(df, nonc, size=None): pass def noncentral_f(dfnum, dfden, nonc, size=None): pass def normal(loc=0.0, scale=1.0, size=None): pass def pareto(a, size=None): pass def permutation(x): pass def poisson(lam=1.0, size=None): pass def power(a, size=None): pass def rand(*args): pass def randint(low, high=None, size=None, dtype='l'): pass def randn(*args): pass def random_integers(low, high=None, size=None): pass def random_sample(size=None): pass def rayleigh(scale=1.0, size=None): pass def seed(seed=None): pass def set_state(state): pass def shuffle(x): pass def standard_cauchy(size=None): pass def standard_exponential(size=None): pass def standard_gamma(shape, size=None): pass def standard_normal(size=None): pass def standard_t(df, size=None): pass def triangular(left, mode, right, size=None): pass def uniform(low=0.0, high=1.0, size=None): pass def vonmises(mu, kappa, size=None): pass def wald(mean, scale, size=None): pass def weibull(a, size=None): pass def zipf(a, size=None): pass ''') def numpy_core_umath_transform(): ufunc_optional_keyword_arguments = ("""out=None, where=True, 
casting='same_kind', order='K', """ """dtype=None, subok=True""") return astroid.parse(''' # Constants e = 2.718281828459045 euler_gamma = 0.5772156649015329 # No arg functions def geterrobj(): pass # One arg functions def seterrobj(errobj): pass # One arg functions with optional kwargs def arccos(x, {opt_args:s}): pass def arccosh(x, {opt_args:s}): pass def arcsin(x, {opt_args:s}): pass def arcsinh(x, {opt_args:s}): pass def arctan(x, {opt_args:s}): pass def arctanh(x, {opt_args:s}): pass def cbrt(x, {opt_args:s}): pass def conj(x, {opt_args:s}): pass def conjugate(x, {opt_args:s}): pass def cosh(x, {opt_args:s}): pass def deg2rad(x, {opt_args:s}): pass def degrees(x, {opt_args:s}): pass def exp2(x, {opt_args:s}): pass def expm1(x, {opt_args:s}): pass def fabs(x, {opt_args:s}): pass def frexp(x, {opt_args:s}): pass def isfinite(x, {opt_args:s}): pass def isinf(x, {opt_args:s}): pass def log(x, {opt_args:s}): pass def log1p(x, {opt_args:s}): pass def log2(x, {opt_args:s}): pass def logical_not(x, {opt_args:s}): pass def modf(x, {opt_args:s}): pass def negative(x, {opt_args:s}): pass def rad2deg(x, {opt_args:s}): pass def radians(x, {opt_args:s}): pass def reciprocal(x, {opt_args:s}): pass def rint(x, {opt_args:s}): pass def sign(x, {opt_args:s}): pass def signbit(x, {opt_args:s}): pass def sinh(x, {opt_args:s}): pass def spacing(x, {opt_args:s}): pass def square(x, {opt_args:s}): pass def tan(x, {opt_args:s}): pass def tanh(x, {opt_args:s}): pass def trunc(x, {opt_args:s}): pass # Two args functions with optional kwargs def bitwise_and(x1, x2, {opt_args:s}): pass def bitwise_or(x1, x2, {opt_args:s}): pass def bitwise_xor(x1, x2, {opt_args:s}): pass def copysign(x1, x2, {opt_args:s}): pass def divide(x1, x2, {opt_args:s}): pass def equal(x1, x2, {opt_args:s}): pass def float_power(x1, x2, {opt_args:s}): pass def floor_divide(x1, x2, {opt_args:s}): pass def fmax(x1, x2, {opt_args:s}): pass def fmin(x1, x2, {opt_args:s}): pass def fmod(x1, x2, {opt_args:s}): pass def greater(x1, x2, {opt_args:s}): pass def hypot(x1, x2, {opt_args:s}): pass def ldexp(x1, x2, {opt_args:s}): pass def left_shift(x1, x2, {opt_args:s}): pass def less(x1, x2, {opt_args:s}): pass def logaddexp(x1, x2, {opt_args:s}): pass def logaddexp2(x1, x2, {opt_args:s}): pass def logical_and(x1, x2, {opt_args:s}): pass def logical_or(x1, x2, {opt_args:s}): pass def logical_xor(x1, x2, {opt_args:s}): pass def maximum(x1, x2, {opt_args:s}): pass def minimum(x1, x2, {opt_args:s}): pass def nextafter(x1, x2, {opt_args:s}): pass def not_equal(x1, x2, {opt_args:s}): pass def power(x1, x2, {opt_args:s}): pass def remainder(x1, x2, {opt_args:s}): pass def right_shift(x1, x2, {opt_args:s}): pass def subtract(x1, x2, {opt_args:s}): pass def true_divide(x1, x2, {opt_args:s}): pass '''.format(opt_args=ufunc_optional_keyword_arguments)) def numpy_core_numerictypes_transform(): return astroid.parse(''' # different types defined in numerictypes.py class generic(object): def __init__(self, value): self.T = None self.base = None self.data = None self.dtype = None self.flags = None self.flat = None self.imag = None self.itemsize = None self.nbytes = None self.ndim = None self.real = None self.size = None self.strides = None def all(self): pass def any(self): pass def argmax(self): pass def argmin(self): pass def argsort(self): pass def astype(self): pass def base(self): pass def byteswap(self): pass def choose(self): pass def clip(self): pass def compress(self): pass def conj(self): pass def conjugate(self): pass def copy(self): pass def 
cumprod(self): pass def cumsum(self): pass def data(self): pass def diagonal(self): pass def dtype(self): pass def dump(self): pass def dumps(self): pass def fill(self): pass def flags(self): pass def flat(self): pass def flatten(self): pass def getfield(self): pass def imag(self): pass def item(self): pass def itemset(self): pass def itemsize(self): pass def max(self): pass def mean(self): pass def min(self): pass def nbytes(self): pass def ndim(self): pass def newbyteorder(self): pass def nonzero(self): pass def prod(self): pass def ptp(self): pass def put(self): pass def ravel(self): pass def real(self): pass def repeat(self): pass def reshape(self): pass def resize(self): pass def round(self): pass def searchsorted(self): pass def setfield(self): pass def setflags(self): pass def shape(self): pass def size(self): pass def sort(self): pass def squeeze(self): pass def std(self): pass def strides(self): pass def sum(self): pass def swapaxes(self): pass def take(self): pass def tobytes(self): pass def tofile(self): pass def tolist(self): pass def tostring(self): pass def trace(self): pass def transpose(self): pass def var(self): pass def view(self): pass class dtype(object): def __init__(self, obj, align=False, copy=False): self.alignment = None self.base = None self.byteorder = None self.char = None self.descr = None self.fields = None self.flags = None self.hasobject = None self.isalignedstruct = None self.isbuiltin = None self.isnative = None self.itemsize = None self.kind = None self.metadata = None self.name = None self.names = None self.num = None self.shape = None self.str = None self.subdtype = None self.type = None def newbyteorder(self, new_order='S'): pass class ndarray(object): def __init__(self, shape, dtype=float, buffer=None, offset=0, strides=None, order=None): self.T = None self.base = None self.ctypes = None self.data = None self.dtype = None self.flags = None self.flat = None self.imag = None self.itemsize = None self.nbytes = None self.ndim = None self.real = None self.shape = None self.size = None self.strides = None def all(self): pass def any(self): pass def argmax(self): pass def argmin(self): pass def argpartition(self): pass def argsort(self): pass def astype(self): pass def byteswap(self): pass def choose(self): pass def clip(self): pass def compress(self): pass def conj(self): pass def conjugate(self): pass def copy(self): pass def cumprod(self): pass def cumsum(self): pass def diagonal(self): pass def dot(self): pass def dump(self): pass def dumps(self): pass def fill(self): pass def flatten(self): pass def getfield(self): pass def item(self): pass def itemset(self): pass def max(self): pass def mean(self): pass def min(self): pass def newbyteorder(self): pass def nonzero(self): pass def partition(self): pass def prod(self): pass def ptp(self): pass def put(self): pass def ravel(self): pass def repeat(self): pass def reshape(self): pass def resize(self): pass def round(self): pass def searchsorted(self): pass def setfield(self): pass def setflags(self): pass def sort(self): pass def squeeze(self): pass def std(self): pass def sum(self): pass def swapaxes(self): pass def take(self): pass def tobytes(self): pass def tofile(self): pass def tolist(self): pass def tostring(self): pass def trace(self): pass def transpose(self): pass def var(self): pass def view(self): pass class busdaycalendar(object): def __init__(self, weekmask='1111100', holidays=None): self.holidays = None self.weekmask = None class flexible(generic): pass class bool_(generic): pass class 
number(generic): pass class datetime64(generic): pass class void(flexible): def __init__(self, *args, **kwargs): self.base = None self.dtype = None self.flags = None def getfield(self): pass def setfield(self): pass class character(flexible): pass class integer(number): def __init__(self, value): self.denominator = None self.numerator = None class inexact(number): pass class str_(str, character): def maketrans(self, x, y=None, z=None): pass class bytes_(bytes, character): def fromhex(self, string): pass def maketrans(self, frm, to): pass class signedinteger(integer): pass class unsignedinteger(integer): pass class complexfloating(inexact): pass class floating(inexact): pass class float64(floating, float): def fromhex(self, string): pass class uint64(unsignedinteger): pass class complex64(complexfloating): pass class int16(signedinteger): pass class float96(floating): pass class int8(signedinteger): pass class uint32(unsignedinteger): pass class uint8(unsignedinteger): pass class _typedict(dict): pass class complex192(complexfloating): pass class timedelta64(signedinteger): pass class int32(signedinteger): pass class uint16(unsignedinteger): pass class float32(floating): pass class complex128(complexfloating, complex): pass class float16(floating): pass class int64(signedinteger): pass buffer_type = memoryview bool8 = bool_ byte = int8 bytes0 = bytes_ cdouble = complex128 cfloat = complex128 clongdouble = complex192 clongfloat = complex192 complex_ = complex128 csingle = complex64 double = float64 float_ = float64 half = float16 int0 = int32 int_ = int32 intc = int32 intp = int32 long = int32 longcomplex = complex192 longdouble = float96 longfloat = float96 longlong = int64 object0 = object_ object_ = object_ short = int16 single = float32 singlecomplex = complex64 str0 = str_ string_ = bytes_ ubyte = uint8 uint = uint32 uint0 = uint32 uintc = uint32 uintp = uint32 ulonglong = uint64 unicode = str_ unicode_ = str_ ushort = uint16 void0 = void ''') def numpy_funcs(): return astroid.parse(''' import builtins def sum(a, axis=None, dtype=None, out=None, keepdims=None): return builtins.sum(a) ''') astroid.register_module_extender(astroid.MANAGER, 'numpy.core.umath', numpy_core_umath_transform) astroid.register_module_extender(astroid.MANAGER, 'numpy.random.mtrand', numpy_random_mtrand_transform) astroid.register_module_extender(astroid.MANAGER, 'numpy.core.numerictypes', numpy_core_numerictypes_transform) astroid.register_module_extender(astroid.MANAGER, 'numpy', numpy_funcs) astroid-2.0.1/astroid/brain/brain_pkg_resources.py0000644000076500000240000000430713324063433023075 0ustar claudiustaff00000000000000# Copyright (c) 2016, 2018 Claudiu Popa # Copyright (c) 2016 Ceridwen # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import astroid from astroid import parse from astroid import inference_tip from astroid import register_module_extender from astroid import MANAGER def pkg_resources_transform(): return parse(''' def require(*requirements): return pkg_resources.working_set.require(*requirements) def run_script(requires, script_name): return pkg_resources.working_set.run_script(requires, script_name) def iter_entry_points(group, name=None): return pkg_resources.working_set.iter_entry_points(group, name) def resource_exists(package_or_requirement, resource_name): return get_provider(package_or_requirement).has_resource(resource_name) def resource_isdir(package_or_requirement, 
resource_name): return get_provider(package_or_requirement).resource_isdir( resource_name) def resource_filename(package_or_requirement, resource_name): return get_provider(package_or_requirement).get_resource_filename( self, resource_name) def resource_stream(package_or_requirement, resource_name): return get_provider(package_or_requirement).get_resource_stream( self, resource_name) def resource_string(package_or_requirement, resource_name): return get_provider(package_or_requirement).get_resource_string( self, resource_name) def resource_listdir(package_or_requirement, resource_name): return get_provider(package_or_requirement).resource_listdir( resource_name) def extraction_error(): pass def get_cache_path(archive_name, names=()): extract_path = self.extraction_path or get_default_cache() target_path = os.path.join(extract_path, archive_name+'-tmp', *names) return target_path def postprocess(tempname, filename): pass def set_extraction_path(path): pass def cleanup_resources(force=False): pass def get_distribution(dist): return Distribution(dist) _namespace_packages = {} ''') register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform) astroid-2.0.1/astroid/brain/brain_pytest.py0000644000076500000240000000450113324063433021546 0ustar claudiustaff00000000000000# Copyright (c) 2014-2016 Claudiu Popa # Copyright (c) 2014 Jeff Quast # Copyright (c) 2014 Google, Inc. # Copyright (c) 2016 Florian Bruhin # Copyright (c) 2016 Ceridwen # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for pytest.""" from __future__ import absolute_import from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder def pytest_transform(): return AstroidBuilder(MANAGER).string_build(''' try: import _pytest.mark import _pytest.recwarn import _pytest.runner import _pytest.python import _pytest.skipping import _pytest.assertion except ImportError: pass else: deprecated_call = _pytest.recwarn.deprecated_call warns = _pytest.recwarn.warns exit = _pytest.runner.exit fail = _pytest.runner.fail skip = _pytest.runner.skip importorskip = _pytest.runner.importorskip xfail = _pytest.skipping.xfail mark = _pytest.mark.MarkGenerator() raises = _pytest.python.raises # New in pytest 3.0 try: approx = _pytest.python.approx register_assert_rewrite = _pytest.assertion.register_assert_rewrite except AttributeError: pass # Moved in pytest 3.0 try: import _pytest.freeze_support freeze_includes = _pytest.freeze_support.freeze_includes except ImportError: try: import _pytest.genscript freeze_includes = _pytest.genscript.freeze_includes except ImportError: pass try: import _pytest.debugging set_trace = _pytest.debugging.pytestPDB().set_trace except ImportError: try: import _pytest.pdb set_trace = _pytest.pdb.pytestPDB().set_trace except ImportError: pass try: import _pytest.fixtures fixture = _pytest.fixtures.fixture yield_fixture = _pytest.fixtures.yield_fixture except ImportError: try: import _pytest.python fixture = _pytest.python.fixture yield_fixture = _pytest.python.yield_fixture except ImportError: pass ''') register_module_extender(MANAGER, 'pytest', pytest_transform) register_module_extender(MANAGER, 'py.test', pytest_transform) astroid-2.0.1/astroid/brain/brain_qt.py0000644000076500000240000000460213324063433020644 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016 Claudiu Popa # Copyright (c) 2016 Ceridwen # Copyright (c) 2017 Roy Wright # 
Copyright (c) 2018 Ashley Whetter # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for the PyQT library.""" from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder from astroid import nodes from astroid import parse def _looks_like_signal(node, signal_name='pyqtSignal'): if '__class__' in node.instance_attrs: try: cls = node.instance_attrs['__class__'][0] return cls.name == signal_name except AttributeError: # return False if the cls does not have a name attribute pass return False def transform_pyqt_signal(node): module = parse(''' class pyqtSignal(object): def connect(self, slot, type=None, no_receiver_check=False): pass def disconnect(self, slot): pass def emit(self, *args): pass ''') signal_cls = module['pyqtSignal'] node.instance_attrs['emit'] = signal_cls['emit'] node.instance_attrs['disconnect'] = signal_cls['disconnect'] node.instance_attrs['connect'] = signal_cls['connect'] def transform_pyside_signal(node): module = parse(''' class NotPySideSignal(object): def connect(self, receiver, type=None): pass def disconnect(self, receiver): pass def emit(self, *args): pass ''') signal_cls = module['NotPySideSignal'] node.instance_attrs['connect'] = signal_cls['connect'] node.instance_attrs['disconnect'] = signal_cls['disconnect'] node.instance_attrs['emit'] = signal_cls['emit'] def pyqt4_qtcore_transform(): return AstroidBuilder(MANAGER).string_build(''' def SIGNAL(signal_name): pass class QObject(object): def emit(self, signal): pass ''') register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform) MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal, _looks_like_signal) MANAGER.register_transform(nodes.ClassDef, transform_pyside_signal, lambda node: node.qname() == 'PySide.QtCore.Signal') astroid-2.0.1/astroid/brain/brain_random.py0000644000076500000240000000511113324063433021474 0ustar claudiustaff00000000000000# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import random import astroid from astroid import helpers from astroid import MANAGER ACCEPTED_ITERABLES_FOR_SAMPLE = ( astroid.List, astroid.Set, astroid.Tuple, ) def _clone_node_with_lineno(node, parent, lineno): cls = node.__class__ other_fields = node._other_fields _astroid_fields = node._astroid_fields init_params = { 'lineno': lineno, 'col_offset': node.col_offset, 'parent': parent } postinit_params = { param: getattr(node, param) for param in _astroid_fields } if other_fields: init_params.update({ param: getattr(node, param) for param in other_fields }) new_node = cls(**init_params) if hasattr(node, 'postinit') and _astroid_fields: new_node.postinit(**postinit_params) return new_node def infer_random_sample(node, context=None): if len(node.args) != 2: raise astroid.UseInferenceDefault length = node.args[1] if not isinstance(length, astroid.Const): raise astroid.UseInferenceDefault if not isinstance(length.value, int): raise astroid.UseInferenceDefault inferred_sequence = helpers.safe_infer(node.args[0], context=context) if inferred_sequence in (None, astroid.Uninferable): raise astroid.UseInferenceDefault if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE): raise astroid.UseInferenceDefault if length.value > len(inferred_sequence.elts): # In this case, this will raise a ValueError raise 
astroid.UseInferenceDefault try: elts = random.sample(inferred_sequence.elts, length.value) except ValueError: raise astroid.UseInferenceDefault new_node = astroid.List( lineno=node.lineno, col_offset=node.col_offset, parent=node.scope(), ) new_elts = [ _clone_node_with_lineno( elt, parent=new_node, lineno=new_node.lineno ) for elt in elts ] new_node.postinit(new_elts) return iter((new_node, )) def _looks_like_random_sample(node): func = node.func if isinstance(func, astroid.Attribute): return func.attrname == 'sample' if isinstance(func, astroid.Name): return func.name == 'sample' return False MANAGER.register_transform( astroid.Call, astroid.inference_tip(infer_random_sample), _looks_like_random_sample, ) astroid-2.0.1/astroid/brain/brain_re.py0000644000076500000240000000212213324063433020621 0ustar claudiustaff00000000000000# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import sys import astroid PY36 = sys.version_info >= (3, 6) if PY36: # Since Python 3.6 there is the RegexFlag enum # where every entry will be exposed via updating globals() def _re_transform(): return astroid.parse(''' import sre_compile ASCII = sre_compile.SRE_FLAG_ASCII IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE LOCALE = sre_compile.SRE_FLAG_LOCALE UNICODE = sre_compile.SRE_FLAG_UNICODE MULTILINE = sre_compile.SRE_FLAG_MULTILINE DOTALL = sre_compile.SRE_FLAG_DOTALL VERBOSE = sre_compile.SRE_FLAG_VERBOSE A = ASCII I = IGNORECASE L = LOCALE U = UNICODE M = MULTILINE S = DOTALL X = VERBOSE TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE T = TEMPLATE DEBUG = sre_compile.SRE_FLAG_DEBUG ''') astroid.register_module_extender(astroid.MANAGER, 're', _re_transform) astroid-2.0.1/astroid/brain/brain_six.py0000644000076500000240000001401313324063433021020 0ustar claudiustaff00000000000000# Copyright (c) 2014-2016, 2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for six module.""" from textwrap import dedent from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder from astroid.exceptions import AstroidBuildingError, InferenceError, AttributeInferenceError from astroid import nodes SIX_ADD_METACLASS = 'six.add_metaclass' def _indent(text, prefix, predicate=None): """Adds 'prefix' to the beginning of selected lines in 'text'. If 'predicate' is provided, 'prefix' will only be added to the lines where 'predicate(line)' is True. If 'predicate' is not provided, it will default to adding 'prefix' to all non-empty lines that do not consist solely of whitespace characters. 
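    For instance, ``six_moves_transform`` below uses this helper to indent the
    whole ``_IMPORTS`` block so that it nests inside the body of the generated
    ``Moves`` class; blank lines in that block are left untouched.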
""" if predicate is None: predicate = lambda line: line.strip() def prefixed_lines(): for line in text.splitlines(True): yield prefix + line if predicate(line) else line return ''.join(prefixed_lines()) _IMPORTS = """ import _io cStringIO = _io.StringIO filter = filter from itertools import filterfalse input = input from sys import intern map = map range = range from imp import reload as reload_module from functools import reduce from shlex import quote as shlex_quote from io import StringIO from collections import UserDict, UserList, UserString xrange = range zip = zip from itertools import zip_longest import builtins import configparser import copyreg import _dummy_thread import http.cookiejar as http_cookiejar import http.cookies as http_cookies import html.entities as html_entities import html.parser as html_parser import http.client as http_client import http.server as http_server BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server import pickle as cPickle import queue import reprlib import socketserver import _thread import winreg import xmlrpc.server as xmlrpc_server import xmlrpc.client as xmlrpc_client import urllib.robotparser as urllib_robotparser import email.mime.multipart as email_mime_multipart import email.mime.nonmultipart as email_mime_nonmultipart import email.mime.text as email_mime_text import email.mime.base as email_mime_base import urllib.parse as urllib_parse import urllib.error as urllib_error import tkinter import tkinter.dialog as tkinter_dialog import tkinter.filedialog as tkinter_filedialog import tkinter.scrolledtext as tkinter_scrolledtext import tkinter.simpledialog as tkinder_simpledialog import tkinter.tix as tkinter_tix import tkinter.ttk as tkinter_ttk import tkinter.constants as tkinter_constants import tkinter.dnd as tkinter_dnd import tkinter.colorchooser as tkinter_colorchooser import tkinter.commondialog as tkinter_commondialog import tkinter.filedialog as tkinter_tkfiledialog import tkinter.font as tkinter_font import tkinter.messagebox as tkinter_messagebox import urllib import urllib.request as urllib_request import urllib.robotparser as urllib_robotparser import urllib.parse as urllib_parse import urllib.error as urllib_error """ def six_moves_transform(): code = dedent(''' class Moves(object): {} moves = Moves() ''').format(_indent(_IMPORTS, " ")) module = AstroidBuilder(MANAGER).string_build(code) module.name = 'six.moves' return module def _six_fail_hook(modname): """Fix six.moves imports due to the dynamic nature of this class. 
Construct a pseudo-module which contains all the necessary imports for six :param modname: Name of failed module :type modname: str :return: An astroid module :rtype: nodes.Module """ attribute_of = (modname != "six.moves" and modname.startswith("six.moves")) if modname != 'six.moves' and not attribute_of: raise AstroidBuildingError(modname=modname) module = AstroidBuilder(MANAGER).string_build(_IMPORTS) module.name = 'six.moves' if attribute_of: # Facilitate import of submodules in Moves start_index = len(module.name) attribute = modname[start_index:].lstrip(".").replace(".", "_") try: import_attr = module.getattr(attribute)[0] except AttributeInferenceError: raise AstroidBuildingError(modname=modname) if isinstance(import_attr, nodes.Import): submodule = MANAGER.ast_from_module_name(import_attr.names[0][0]) return submodule # Let dummy submodule imports pass through # This will cause an Uninferable result, which is okay return module def _looks_like_decorated_with_six_add_metaclass(node): if not node.decorators: return False for decorator in node.decorators.nodes: if not isinstance(decorator, nodes.Call): continue if decorator.func.as_string() == SIX_ADD_METACLASS: return True return False def transform_six_add_metaclass(node): """Check if the given class node is decorated with *six.add_metaclass* If so, inject its argument as the metaclass of the underlying class. """ if not node.decorators: return for decorator in node.decorators.nodes: if not isinstance(decorator, nodes.Call): continue try: func = next(decorator.func.infer()) except InferenceError: continue if func.qname() == SIX_ADD_METACLASS and decorator.args: metaclass = decorator.args[0] node._metaclass = metaclass return node register_module_extender(MANAGER, 'six', six_moves_transform) register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six', six_moves_transform) MANAGER.register_failed_import_hook(_six_fail_hook) MANAGER.register_transform( nodes.ClassDef, transform_six_add_metaclass, _looks_like_decorated_with_six_add_metaclass, ) astroid-2.0.1/astroid/brain/brain_ssl.py0000644000076500000240000000673013324063433021025 0ustar claudiustaff00000000000000# Copyright (c) 2016 Claudiu Popa # Copyright (c) 2016 Ceridwen # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for the ssl library.""" from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder from astroid import nodes from astroid import parse def ssl_transform(): return parse(''' from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION from _ssl import _SSLContext, MemoryBIO from _ssl import ( SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError, SSLSyscallError, SSLEOFError, ) from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes try: from _ssl import RAND_egd except ImportError: # LibreSSL does not provide RAND_egd pass from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE, OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2, OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE) from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE, ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE, ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE, ALERT_DESCRIPTION_BAD_RECORD_MAC, 
ALERT_DESCRIPTION_CERTIFICATE_EXPIRED, ALERT_DESCRIPTION_CERTIFICATE_REVOKED, ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN, ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE, ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR, ALERT_DESCRIPTION_DECOMPRESSION_FAILURE, ALERT_DESCRIPTION_DECRYPT_ERROR, ALERT_DESCRIPTION_HANDSHAKE_FAILURE, ALERT_DESCRIPTION_ILLEGAL_PARAMETER, ALERT_DESCRIPTION_INSUFFICIENT_SECURITY, ALERT_DESCRIPTION_INTERNAL_ERROR, ALERT_DESCRIPTION_NO_RENEGOTIATION, ALERT_DESCRIPTION_PROTOCOL_VERSION, ALERT_DESCRIPTION_RECORD_OVERFLOW, ALERT_DESCRIPTION_UNEXPECTED_MESSAGE, ALERT_DESCRIPTION_UNKNOWN_CA, ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY, ALERT_DESCRIPTION_UNRECOGNIZED_NAME, ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE, ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION, ALERT_DESCRIPTION_USER_CANCELLED) from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL, SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ, SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN) from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN from _ssl import _OPENSSL_API_VERSION from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2 ''') register_module_extender(MANAGER, 'ssl', ssl_transform) astroid-2.0.1/astroid/brain/brain_subprocess.py0000644000076500000240000000703413324063433022412 0ustar claudiustaff00000000000000# Copyright (c) 2016-2017 Claudiu Popa # Copyright (c) 2017 Hugo # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import sys import textwrap import six import astroid PY34 = sys.version_info >= (3, 4) PY36 = sys.version_info >= (3, 6) PY33 = sys.version_info >= (3, 3) def _subprocess_transform(): if six.PY3: communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) communicate_signature = 'def communicate(self, input=None, timeout=None)' if PY36: init = """ def __init__(self, args, bufsize=0, executable=None, stdin=None, stdout=None, stderr=None, preexec_fn=None, close_fds=False, shell=False, cwd=None, env=None, universal_newlines=False, startupinfo=None, creationflags=0, restore_signals=True, start_new_session=False, pass_fds=(), *, encoding=None, errors=None): pass """ else: init = """ def __init__(self, args, bufsize=0, executable=None, stdin=None, stdout=None, stderr=None, preexec_fn=None, close_fds=False, shell=False, cwd=None, env=None, universal_newlines=False, startupinfo=None, creationflags=0, restore_signals=True, start_new_session=False, pass_fds=()): pass """ else: communicate = ('string', 'string') communicate_signature = 'def communicate(self, input=None)' init = """ def __init__(self, args, bufsize=0, executable=None, stdin=None, stdout=None, stderr=None, preexec_fn=None, close_fds=False, shell=False, cwd=None, env=None, universal_newlines=False, startupinfo=None, creationflags=0): pass """ if PY34: wait_signature = 'def wait(self, timeout=None)' else: wait_signature = 'def wait(self)' if six.PY3: ctx_manager = ''' def __enter__(self): return self def __exit__(self, *args): pass ''' else: ctx_manager = '' py3_args = "" if PY33: py3_args = "args = []" code = textwrap.dedent(''' class Popen(object): returncode = pid = 0 stdin = stdout = stderr = file() %(py3_args)s %(communicate_signature)s: return %(communicate)r %(wait_signature)s: return 
self.returncode def poll(self): return self.returncode def send_signal(self, signal): pass def terminate(self): pass def kill(self): pass %(ctx_manager)s ''' % {'communicate': communicate, 'communicate_signature': communicate_signature, 'wait_signature': wait_signature, 'ctx_manager': ctx_manager, 'py3_args': py3_args, }) init_lines = textwrap.dedent(init).splitlines() indented_init = '\n'.join([' ' * 4 + line for line in init_lines]) code += indented_init return astroid.parse(code) astroid.register_module_extender(astroid.MANAGER, 'subprocess', _subprocess_transform) astroid-2.0.1/astroid/brain/brain_threading.py0000644000076500000240000000134613324063433022167 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2016 Claudiu Popa # Copyright (c) 2017 Łukasz Rogalski # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import astroid def _thread_transform(): return astroid.parse(''' class lock(object): def acquire(self, blocking=True): pass def release(self): pass def __enter__(self): return True def __exit__(self, *args): pass def Lock(): return lock() ''') astroid.register_module_extender(astroid.MANAGER, 'threading', _thread_transform) astroid-2.0.1/astroid/brain/brain_typing.py0000644000076500000240000000526513324063433021540 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Claudiu Popa # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017 David Euresti # Copyright (c) 2018 Bryce Guinta """Astroid hooks for typing.py support.""" import typing from astroid import ( MANAGER, UseInferenceDefault, extract_node, inference_tip, nodes, InferenceError, ) TYPING_NAMEDTUPLE_BASENAMES = { 'NamedTuple', 'typing.NamedTuple' } TYPING_TYPEVARS = {'TypeVar', 'NewType'} TYPING_TYPEVARS_QUALIFIED = {'typing.TypeVar', 'typing.NewType'} TYPING_TYPE_TEMPLATE = """ class Meta(type): def __getitem__(self, item): return self class {0}(metaclass=Meta): pass """ TYPING_MEMBERS = set(typing.__all__) def looks_like_typing_typevar_or_newtype(node): func = node.func if isinstance(func, nodes.Attribute): return func.attrname in TYPING_TYPEVARS if isinstance(func, nodes.Name): return func.name in TYPING_TYPEVARS return False def infer_typing_typevar_or_newtype(node, context=None): """Infer a typing.TypeVar(...) or typing.NewType(...) call""" try: func = next(node.func.infer(context=context)) except InferenceError as exc: raise UseInferenceDefault from exc if func.qname() not in TYPING_TYPEVARS_QUALIFIED: raise UseInferenceDefault if not node.args: raise UseInferenceDefault typename = node.args[0].as_string().strip("'") node = extract_node(TYPING_TYPE_TEMPLATE.format(typename)) return node.infer(context=context) def _looks_like_typing_subscript(node): """Try to figure out if a Subscript node *might* be a typing-related subscript""" if isinstance(node, nodes.Name): return node.name in TYPING_MEMBERS elif isinstance(node, nodes.Attribute): return node.attrname in TYPING_MEMBERS elif isinstance(node, nodes.Subscript): return _looks_like_typing_subscript(node.value) return False def infer_typing_attr(node, context=None): """Infer a typing.X[...] 
subscript""" try: value = next(node.value.infer()) except InferenceError as exc: raise UseInferenceDefault from exc if not value.qname().startswith('typing.'): raise UseInferenceDefault node = extract_node(TYPING_TYPE_TEMPLATE.format(value.qname().split('.')[-1])) return node.infer(context=context) MANAGER.register_transform( nodes.Call, inference_tip(infer_typing_typevar_or_newtype), looks_like_typing_typevar_or_newtype ) MANAGER.register_transform( nodes.Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript, ) astroid-2.0.1/astroid/brain/brain_uuid.py0000644000076500000240000000107413324063433021166 0ustar claudiustaff00000000000000# Copyright (c) 2017 Claudiu Popa # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Astroid hooks for the UUID module.""" from astroid import MANAGER from astroid import nodes def _patch_uuid_class(node): # The .int member is patched using __dict__ node.locals['int'] = [nodes.Const(0, parent=node)] MANAGER.register_transform( nodes.ClassDef, _patch_uuid_class, lambda node: node.qname() == 'uuid.UUID' ) astroid-2.0.1/astroid/builder.py0000644000076500000240000003663613324063433017414 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2013 Phil Schaf # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014-2015 Google, Inc. # Copyright (c) 2014 Alexander Presnyakov # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2018 Anthony Sottile # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """The AstroidBuilder makes astroid from living object and / or from _ast The builder is not thread safe and can't be used to parse different sources at the same time. """ import os import textwrap from tokenize import detect_encoding from astroid._ast import _parse from astroid import bases from astroid import exceptions from astroid import manager from astroid import modutils from astroid import raw_building from astroid import rebuilder from astroid import nodes from astroid import util # The name of the transient function that is used to # wrap expressions to be extracted when calling # extract_node. _TRANSIENT_FUNCTION = '__' # The comment used to select a statement to be extracted # when calling extract_node. _STATEMENT_SELECTOR = '#@' MANAGER = manager.AstroidManager() def open_source_file(filename): with open(filename, 'rb') as byte_stream: encoding = detect_encoding(byte_stream.readline)[0] stream = open(filename, 'r', newline=None, encoding=encoding) data = stream.read() return stream, encoding, data def _can_assign_attr(node, attrname): try: slots = node.slots() except NotImplementedError: pass else: if slots and attrname not in {slot.value for slot in slots}: return False return True class AstroidBuilder(raw_building.InspectBuilder): """Class for building an astroid tree from source code or from a live module. The param *manager* specifies the manager class which should be used. If no manager is given, then the default one will be used. The param *apply_transforms* determines if the transforms should be applied after the tree was built from source or from a live object, by default being True. 
""" # pylint: disable=redefined-outer-name def __init__(self, manager=None, apply_transforms=True): super(AstroidBuilder, self).__init__() self._manager = manager or MANAGER self._apply_transforms = apply_transforms def module_build(self, module, modname=None): """Build an astroid from a living module instance.""" node = None path = getattr(module, '__file__', None) if path is not None: path_, ext = os.path.splitext(modutils._path_from_filename(path)) if ext in ('.py', '.pyc', '.pyo') and os.path.exists(path_ + '.py'): node = self.file_build(path_ + '.py', modname) if node is None: # this is a built-in module # get a partial representation by introspection node = self.inspect_build(module, modname=modname, path=path) if self._apply_transforms: # We have to handle transformation by ourselves since the # rebuilder isn't called for builtin nodes node = self._manager.visit_transforms(node) return node def file_build(self, path, modname=None): """Build astroid from a source code file (i.e. from an ast) *path* is expected to be a python source file """ try: stream, encoding, data = open_source_file(path) except IOError as exc: raise exceptions.AstroidBuildingError( 'Unable to load file {path}:\n{error}', modname=modname, path=path, error=exc) from exc except (SyntaxError, LookupError) as exc: raise exceptions.AstroidSyntaxError( 'Python 3 encoding specification error or unknown encoding:\n' '{error}', modname=modname, path=path, error=exc) from exc except UnicodeError as exc: # wrong encoding # detect_encoding returns utf-8 if no encoding specified raise exceptions.AstroidBuildingError( 'Wrong or no encoding specified for {filename}.', filename=path) from exc with stream: # get module name if necessary if modname is None: try: modname = '.'.join(modutils.modpath_from_file(path)) except ImportError: modname = os.path.splitext(os.path.basename(path))[0] # build astroid representation module = self._data_build(data, modname, path) return self._post_build(module, encoding) def string_build(self, data, modname='', path=None): """Build astroid from source code string.""" module = self._data_build(data, modname, path) module.file_bytes = data.encode('utf-8') return self._post_build(module, 'utf-8') def _post_build(self, module, encoding): """Handles encoding and delayed nodes after a module has been built""" module.file_encoding = encoding self._manager.cache_module(module) # post tree building steps after we stored the module in the cache: for from_node in module._import_from_nodes: if from_node.modname == '__future__': for symbol, _ in from_node.names: module.future_imports.add(symbol) self.add_from_names_to_locals(from_node) # handle delayed assattr nodes for delayed in module._delayed_assattr: self.delayed_assattr(delayed) # Visit the transforms if self._apply_transforms: module = self._manager.visit_transforms(module) return module def _data_build(self, data, modname, path): """Build tree node from data and add some informations""" try: node = _parse(data + '\n') except (TypeError, ValueError, SyntaxError) as exc: raise exceptions.AstroidSyntaxError( 'Parsing Python code failed:\n{error}', source=data, modname=modname, path=path, error=exc) from exc if path is not None: node_file = os.path.abspath(path) else: node_file = '' if modname.endswith('.__init__'): modname = modname[:-9] package = True else: package = path is not None and os.path.splitext(os.path.basename(path))[0] == '__init__' builder = rebuilder.TreeRebuilder(self._manager) module = builder.visit_module(node, modname, node_file, 
package) module._import_from_nodes = builder._import_from_nodes module._delayed_assattr = builder._delayed_assattr return module def add_from_names_to_locals(self, node): """Store imported names to the locals Resort the locals if coming from a delayed node """ _key_func = lambda node: node.fromlineno def sort_locals(my_list): my_list.sort(key=_key_func) for (name, asname) in node.names: if name == '*': try: imported = node.do_import_module() except exceptions.AstroidBuildingError: continue for name in imported.public_names(): node.parent.set_local(name, node) sort_locals(node.parent.scope().locals[name]) else: node.parent.set_local(asname or name, node) sort_locals(node.parent.scope().locals[asname or name]) def delayed_assattr(self, node): """Visit a AssAttr node This adds name to locals and handle members definition. """ try: frame = node.frame() for inferred in node.expr.infer(): if inferred is util.Uninferable: continue try: if inferred.__class__ is bases.Instance: inferred = inferred._proxied iattrs = inferred.instance_attrs if not _can_assign_attr(inferred, node.attrname): continue elif isinstance(inferred, bases.Instance): # Const, Tuple, ... we may be wrong, may be not, but # anyway we don't want to pollute builtin's namespace continue elif inferred.is_function: iattrs = inferred.instance_attrs else: iattrs = inferred.locals except AttributeError: # XXX log error continue values = iattrs.setdefault(node.attrname, []) if node in values: continue # get assign in __init__ first XXX useful ? if (frame.name == '__init__' and values and values[0].frame().name != '__init__'): values.insert(0, node) else: values.append(node) except exceptions.InferenceError: pass def build_namespace_package_module(name, path): return nodes.Module(name, doc='', path=path, package=True) def parse(code, module_name='', path=None, apply_transforms=True): """Parses a source string in order to obtain an astroid AST from it :param str code: The code for the module. :param str module_name: The name for the module, if any :param str path: The path for the module :param bool apply_transforms: Apply the transforms for the give code. Use it if you don't want the default transforms to be applied. """ code = textwrap.dedent(code) builder = AstroidBuilder(manager=MANAGER, apply_transforms=apply_transforms) return builder.string_build(code, modname=module_name, path=path) def _extract_expressions(node): """Find expressions in a call to _TRANSIENT_FUNCTION and extract them. The function walks the AST recursively to search for expressions that are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an expression, it completely removes the function call node from the tree, replacing it by the wrapped expression inside the parent. :param node: An astroid node. :type node: astroid.bases.NodeNG :yields: The sequence of wrapped expressions on the modified tree expression can be found. """ if (isinstance(node, nodes.Call) and isinstance(node.func, nodes.Name) and node.func.name == _TRANSIENT_FUNCTION): real_expr = node.args[0] real_expr.parent = node.parent # Search for node in all _astng_fields (the fields checked when # get_children is called) of its parent. Some of those fields may # be lists or tuples, in which case the elements need to be checked. # When we find it, replace it by real_expr, so that the AST looks # like no call to _TRANSIENT_FUNCTION ever took place. 
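        # For example, in a module parsed from "x = __(1)" the Call node
        # wrapping the Const 1 sits in its parent Assign's 'value' field, so
        # after the swap below the tree reads as a plain "x = 1".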
for name in node.parent._astroid_fields: child = getattr(node.parent, name) if isinstance(child, (list, tuple)): for idx, compound_child in enumerate(child): if compound_child is node: child[idx] = real_expr elif child is node: setattr(node.parent, name, real_expr) yield real_expr else: for child in node.get_children(): yield from _extract_expressions(child) def _find_statement_by_line(node, line): """Extracts the statement on a specific line from an AST. If the line number of node matches line, it will be returned; otherwise its children are iterated and the function is called recursively. :param node: An astroid node. :type node: astroid.bases.NodeNG :param line: The line number of the statement to extract. :type line: int :returns: The statement on the line, or None if no statement for the line can be found. :rtype: astroid.bases.NodeNG or None """ if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)): # This is an inaccuracy in the AST: the nodes that can be # decorated do not carry explicit information on which line # the actual definition (class/def), but .fromline seems to # be close enough. node_line = node.fromlineno else: node_line = node.lineno if node_line == line: return node for child in node.get_children(): result = _find_statement_by_line(child, line) if result: return result return None def extract_node(code, module_name=''): """Parses some Python code as a module and extracts a designated AST node. Statements: To extract one or more statement nodes, append #@ to the end of the line Examples: >>> def x(): >>> def y(): >>> return 1 #@ The return statement will be extracted. >>> class X(object): >>> def meth(self): #@ >>> pass The function object 'meth' will be extracted. Expressions: To extract arbitrary expressions, surround them with the fake function call __(...). After parsing, the surrounded expression will be returned and the whole AST (accessible via the returned node's parent attribute) will look like the function call was never there in the first place. Examples: >>> a = __(1) The const node will be extracted. >>> def x(d=__(foo.bar)): pass The node containing the default argument will be extracted. >>> def foo(a, b): >>> return 0 < __(len(a)) < b The node containing the function call 'len' will be extracted. If no statements or expressions are selected, the last toplevel statement will be returned. If the selected statement is a discard statement, (i.e. an expression turned into a statement), the wrapped expression is returned instead. For convenience, singleton lists are unpacked. :param str code: A piece of Python code that is parsed as a module. Will be passed through textwrap.dedent first. :param str module_name: The name of the module. :returns: The designated node from the parse tree, or a list of nodes. :rtype: astroid.bases.NodeNG, or a list of nodes. """ def _extract(node): if isinstance(node, nodes.Expr): return node.value return node requested_lines = [] for idx, line in enumerate(code.splitlines()): if line.strip().endswith(_STATEMENT_SELECTOR): requested_lines.append(idx + 1) tree = parse(code, module_name=module_name) if not tree.body: raise ValueError('Empty tree, cannot extract from it') extracted = [] if requested_lines: extracted = [_find_statement_by_line(tree, line) for line in requested_lines] # Modifies the tree. 
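    # Each __(...) wrapper found here is replaced in place by the expression
    # it wraps, and only those wrapped expressions are yielded back.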
extracted.extend(_extract_expressions(tree)) if not extracted: extracted.append(tree.body[-1]) extracted = [_extract(node) for node in extracted] if len(extracted) == 1: return extracted[0] return extracted astroid-2.0.1/astroid/context.py0000644000076500000240000001174013324063433017437 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016, 2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 Nick Drozd # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Various context related utilities, including inference and call contexts.""" import contextlib import copy import pprint class InferenceContext: """Provide context for inference Store already inferred nodes to save time Account for already visited nodes to infinite stop infinite recursion """ __slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'inferred', 'extra_context') def __init__(self, path=None, inferred=None): self.path = path or set() """ :type: set(tuple(NodeNG, optional(str))) Path of visited nodes and their lookupname Currently this key is ``(node, context.lookupname)`` """ self.lookupname = None """ :type: optional[str] The original name of the node e.g. foo = 1 The inference of 'foo' is nodes.Const(1) but the lookup name is 'foo' """ self.callcontext = None """ :type: optional[CallContext] The call arguments and keywords for the given context """ self.boundnode = None """ :type: optional[NodeNG] The bound node of the given context e.g. the bound node of object.__new__(cls) is the object node """ self.inferred = inferred or {} """ :type: dict(seq, seq) Inferred node contexts to their mapped results Currently the key is ``(node, lookupname, callcontext, boundnode)`` and the value is tuple of the inferred results """ self.extra_context = {} """ :type: dict(NodeNG, Context) Context that needs to be passed down through call stacks for call arguments """ def push(self, node): """Push node into inference path :return: True if node is already in context path else False :rtype: bool Allows one to see if the given node has already been looked at for this inference context""" name = self.lookupname if (node, name) in self.path: return True self.path.add((node, name)) return False def clone(self): """Clone inference path For example, each side of a binary operation (BinOp) starts with the same context but diverge as each side is inferred so the InferenceContext will need be cloned""" # XXX copy lookupname/callcontext ? 
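        # The visited-node path is copied so that each branch can diverge on
        # its own, while the 'inferred' cache dict is shared between clones so
        # results computed on one branch stay available to the others.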
clone = InferenceContext(copy.copy(self.path), inferred=self.inferred) clone.callcontext = self.callcontext clone.boundnode = self.boundnode clone.extra_context = self.extra_context return clone def cache_generator(self, key, generator): """Cache result of generator into dictionary Used to cache inference results""" results = [] for result in generator: results.append(result) yield result self.inferred[key] = tuple(results) @contextlib.contextmanager def restore_path(self): path = set(self.path) yield self.path = path def __str__(self): state = ('%s=%s' % (field, pprint.pformat(getattr(self, field), width=80 - len(field))) for field in self.__slots__) return '%s(%s)' % (type(self).__name__, ',\n '.join(state)) class CallContext: """Holds information for a call site.""" __slots__ = ('args', 'keywords') def __init__(self, args, keywords=None): """ :param List[NodeNG] args: Call positional arguments :param Union[List[nodes.Keyword], None] keywords: Call keywords """ self.args = args if keywords: keywords = [(arg.arg, arg.value) for arg in keywords] else: keywords = [] self.keywords = keywords def copy_context(context): if context is not None: return context.clone() return InferenceContext() def bind_context_to_node(context, node): """Give a context a boundnode to retrieve the correct function name or attribute value with from further inference. Do not use an existing context since the boundnode could then be incorrectly propagated higher up in the call stack. :param context: Context to use :type context: Optional(context) :param node: Node to do name lookups from :type node NodeNG: :returns: A new context :rtype: InferenceContext """ if context is not None: context = context.clone() else: context = InferenceContext() context.boundnode = node return context astroid-2.0.1/astroid/decorators.py0000644000076500000240000001072213324063433020117 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016, 2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 Ashley Whetter # Copyright (c) 2018 HoverHell # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """ A few useful function/method decorators.""" import functools import wrapt from astroid import context as contextmod from astroid import exceptions from astroid import util @wrapt.decorator def cached(func, instance, args, kwargs): """Simple decorator to cache result of method calls without args.""" cache = getattr(instance, '__cache', None) if cache is None: instance.__cache = cache = {} try: return cache[func] except KeyError: cache[func] = result = func(*args, **kwargs) return result class cachedproperty: """ Provides a cached property equivalent to the stacking of @cached and @property, but more efficient. After first usage, the becomes part of the object's __dict__. Doing: del obj. empties the cache. Idea taken from the pyramid_ framework and the mercurial_ project. .. _pyramid: http://pypi.python.org/pypi/pyramid .. 
_mercurial: http://pypi.python.org/pypi/Mercurial """ __slots__ = ('wrapped',) def __init__(self, wrapped): try: wrapped.__name__ except AttributeError as exc: raise TypeError('%s must have a __name__ attribute' % wrapped) from exc self.wrapped = wrapped @property def __doc__(self): doc = getattr(self.wrapped, '__doc__', None) return ('%s' % ('\n%s' % doc if doc else '')) def __get__(self, inst, objtype=None): if inst is None: return self val = self.wrapped(inst) setattr(inst, self.wrapped.__name__, val) return val def path_wrapper(func): """return the given infer function wrapped to handle the path Used to stop inference if the node has already been looked at for a given `InferenceContext` to prevent infinite recursion """ @functools.wraps(func) def wrapped(node, context=None, _func=func, **kwargs): """wrapper function handling context""" if context is None: context = contextmod.InferenceContext() if context.push(node): return None yielded = set() generator = _func(node, context, **kwargs) try: while True: res = next(generator) # unproxy only true instance, not const, tuple, dict... if res.__class__.__name__ == 'Instance': ares = res._proxied else: ares = res if ares not in yielded: yield res yielded.add(ares) except StopIteration as error: # Explicit StopIteration to return error information, see # comment in raise_if_nothing_inferred. if error.args: return error.args[0] return None return wrapped @wrapt.decorator def yes_if_nothing_inferred(func, instance, args, kwargs): inferred = False for node in func(*args, **kwargs): inferred = True yield node if not inferred: yield util.Uninferable @wrapt.decorator def raise_if_nothing_inferred(func, instance, args, kwargs): """All generators wrapped with raise_if_nothing_inferred *must* explicitly raise StopIteration with information to create an appropriate structured InferenceError. """ inferred = False try: generator = func(*args, **kwargs) while True: yield next(generator) inferred = True except StopIteration as error: if not inferred: if error.args: # pylint: disable=not-a-mapping raise exceptions.InferenceError(**error.args[0]) else: raise exceptions.InferenceError( 'StopIteration raised without any error information.') astroid-2.0.1/astroid/exceptions.py0000644000076500000240000001560713324063433020142 0ustar claudiustaff00000000000000# Copyright (c) 2007, 2009-2010, 2013 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014 Google, Inc. # Copyright (c) 2015-2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """this module contains exceptions used in the astroid library """ from astroid import util class AstroidError(Exception): """base exception class for all astroid related exceptions AstroidError and its subclasses are structured, intended to hold objects representing state when the exception is thrown. Field values are passed to the constructor as keyword-only arguments. Each subclass has its own set of standard fields, but use your best judgment to decide whether a specific exception instance needs more or fewer fields for debugging. Field values may be used to lazily generate the error message: self.message.format() will be called with the field names and values supplied as keyword arguments. 
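    For instance, an error created as
    ``AstroidError('inference failed for {node!r}', node=node)`` interpolates
    ``node`` into the message only when the exception is converted to a string.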
""" def __init__(self, message='', **kws): super(AstroidError, self).__init__(message) self.message = message for key, value in kws.items(): setattr(self, key, value) def __str__(self): return self.message.format(**vars(self)) class AstroidBuildingError(AstroidError): """exception class when we are unable to build an astroid representation Standard attributes: modname: Name of the module that AST construction failed for. error: Exception raised during construction. """ def __init__(self, message='Failed to import module {modname}.', **kws): super(AstroidBuildingError, self).__init__(message, **kws) class AstroidImportError(AstroidBuildingError): """Exception class used when a module can't be imported by astroid.""" class TooManyLevelsError(AstroidImportError): """Exception class which is raised when a relative import was beyond the top-level. Standard attributes: level: The level which was attempted. name: the name of the module on which the relative import was attempted. """ level = None name = None def __init__(self, message='Relative import with too many levels ' '({level}) for module {name!r}', **kws): super(TooManyLevelsError, self).__init__(message, **kws) class AstroidSyntaxError(AstroidBuildingError): """Exception class used when a module can't be parsed.""" class NoDefault(AstroidError): """raised by function's `default_value` method when an argument has no default value Standard attributes: func: Function node. name: Name of argument without a default. """ func = None name = None def __init__(self, message='{func!r} has no default for {name!r}.', **kws): super(NoDefault, self).__init__(message, **kws) class ResolveError(AstroidError): """Base class of astroid resolution/inference error. ResolveError is not intended to be raised. Standard attributes: context: InferenceContext object. """ context = None class MroError(ResolveError): """Error raised when there is a problem with method resolution of a class. Standard attributes: mros: A sequence of sequences containing ClassDef nodes. cls: ClassDef node whose MRO resolution failed. context: InferenceContext object. """ mros = () cls = None def __str__(self): mro_names = ", ".join("({})".format(", ".join(b.name for b in m)) for m in self.mros) return self.message.format(mros=mro_names, cls=self.cls) class DuplicateBasesError(MroError): """Error raised when there are duplicate bases in the same class bases.""" class InconsistentMroError(MroError): """Error raised when a class's MRO is inconsistent.""" class SuperError(ResolveError): """Error raised when there is a problem with a *super* call. Standard attributes: *super_*: The Super instance that raised the exception. context: InferenceContext object. """ super_ = None def __str__(self): return self.message.format(**vars(self.super_)) class InferenceError(ResolveError): """raised when we are unable to infer a node Standard attributes: node: The node inference was called on. context: InferenceContext object. """ node = None context = None def __init__(self, message='Inference failed for {node!r}.', **kws): super(InferenceError, self).__init__(message, **kws) # Why does this inherit from InferenceError rather than ResolveError? # Changing it causes some inference tests to fail. class NameInferenceError(InferenceError): """Raised when a name lookup fails, corresponds to NameError. Standard attributes: name: The name for which lookup failed, as a string. scope: The node representing the scope in which the lookup occurred. context: InferenceContext object. 
""" name = None scope = None def __init__(self, message='{name!r} not found in {scope!r}.', **kws): super(NameInferenceError, self).__init__(message, **kws) class AttributeInferenceError(ResolveError): """Raised when an attribute lookup fails, corresponds to AttributeError. Standard attributes: target: The node for which lookup failed. attribute: The attribute for which lookup failed, as a string. context: InferenceContext object. """ target = None attribute = None def __init__(self, message='{attribute!r} not found on {target!r}.', **kws): super(AttributeInferenceError, self).__init__(message, **kws) class UseInferenceDefault(Exception): """exception to be raised in custom inference function to indicate that it should go back to the default behaviour """ class _NonDeducibleTypeHierarchy(Exception): """Raised when is_subtype / is_supertype can't deduce the relation between two types.""" class AstroidIndexError(AstroidError): """Raised when an Indexable / Mapping does not have an index / key.""" class AstroidTypeError(AstroidError): """Raised when a TypeError would be expected in Python code.""" class InferenceOverwriteError(AstroidError): """Raised when an inference tip is overwritten Currently only used for debugging. """ # Backwards-compatibility aliases OperationError = util.BadOperationMessage UnaryOperationError = util.BadUnaryOperationMessage BinaryOperationError = util.BadBinaryOperationMessage SuperArgumentTypeError = SuperError UnresolvableName = NameInferenceError NotFoundError = AttributeInferenceError AstroidBuildingException = AstroidBuildingError astroid-2.0.1/astroid/helpers.py0000644000076500000240000002200413324063433017410 0ustar claudiustaff00000000000000# Copyright (c) 2015-2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """ Various helper utilities. 
""" import builtins as builtins_mod from astroid import bases from astroid import context as contextmod from astroid import exceptions from astroid import manager from astroid import nodes from astroid import raw_building from astroid import scoped_nodes from astroid import util BUILTINS = builtins_mod.__name__ def _build_proxy_class(cls_name, builtins): proxy = raw_building.build_class(cls_name) proxy.parent = builtins return proxy def _function_type(function, builtins): if isinstance(function, scoped_nodes.Lambda): if function.root().name == BUILTINS: cls_name = 'builtin_function_or_method' else: cls_name = 'function' elif isinstance(function, bases.BoundMethod): cls_name = 'method' elif isinstance(function, bases.UnboundMethod): cls_name = 'function' return _build_proxy_class(cls_name, builtins) def _object_type(node, context=None): astroid_manager = manager.AstroidManager() builtins = astroid_manager.astroid_cache[BUILTINS] context = context or contextmod.InferenceContext() for inferred in node.infer(context=context): if isinstance(inferred, scoped_nodes.ClassDef): if inferred.newstyle: metaclass = inferred.metaclass() if metaclass: yield metaclass continue yield builtins.getattr('type')[0] elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)): yield _function_type(inferred, builtins) elif isinstance(inferred, scoped_nodes.Module): yield _build_proxy_class('module', builtins) else: yield inferred._proxied def object_type(node, context=None): """Obtain the type of the given node This is used to implement the ``type`` builtin, which means that it's used for inferring type calls, as well as used in a couple of other places in the inference. The node will be inferred first, so this function can support all sorts of objects, as long as they support inference. 
""" try: types = set(_object_type(node, context)) except exceptions.InferenceError: return util.Uninferable if len(types) > 1 or not types: return util.Uninferable return list(types)[0] def _object_type_is_subclass(obj_type, class_or_seq, context=None): if not isinstance(class_or_seq, (tuple, list)): class_seq = (class_or_seq,) else: class_seq = class_or_seq if obj_type is util.Uninferable: return util.Uninferable # Instances are not types class_seq = [item if not isinstance(item, bases.Instance) else util.Uninferable for item in class_seq] # strict compatibility with issubclass # issubclass(type, (object, 1)) evaluates to true # issubclass(object, (1, type)) raises TypeError for klass in class_seq: if klass is util.Uninferable: raise exceptions.AstroidTypeError("arg 2 must be a type or tuple of types") for obj_subclass in obj_type.mro(): if obj_subclass == klass: return True return False def object_isinstance(node, class_or_seq, context=None): """Check if a node 'isinstance' any node in class_or_seq :param node: A given node :param class_or_seq: Union[nodes.NodeNG, Sequence[nodes.NodeNG]] :rtype: bool :raises AstroidTypeError: if the given ``classes_or_seq`` are not types """ obj_type = object_type(node, context) if obj_type is util.Uninferable: return util.Uninferable return _object_type_is_subclass(obj_type, class_or_seq, context=context) def object_issubclass(node, class_or_seq, context=None): """Check if a type is a subclass of any node in class_or_seq :param node: A given node :param class_or_seq: Union[Nodes.NodeNG, Sequence[nodes.NodeNG]] :rtype: bool :raises AstroidTypeError: if the given ``classes_or_seq`` are not types :raises AstroidError: if the type of the given node cannot be inferred or its type's mro doesn't work """ if not isinstance(node, nodes.ClassDef): raise TypeError("{node} needs to be a ClassDef node".format(node=node)) return _object_type_is_subclass(node, class_or_seq, context=context) def safe_infer(node, context=None): """Return the inferred value for the given node. Return None if inference failed or if there is some ambiguity (more than one node has been inferred). """ try: inferit = node.infer(context=context) value = next(inferit) except exceptions.InferenceError: return None try: next(inferit) return None # None if there is ambiguity on the inferred node except exceptions.InferenceError: return None# there is some kind of ambiguity except StopIteration: return value def has_known_bases(klass, context=None): """Return true if all base classes of a class could be inferred.""" try: return klass._all_bases_known except AttributeError: pass for base in klass.bases: result = safe_infer(base, context=context) # TODO: check for A->B->A->B pattern in class structure too? if (not isinstance(result, scoped_nodes.ClassDef) or result is klass or not has_known_bases(result, context=context)): klass._all_bases_known = False return False klass._all_bases_known = True return True def _type_check(type1, type2): if not all(map(has_known_bases, (type1, type2))): raise exceptions._NonDeducibleTypeHierarchy if not all([type1.newstyle, type2.newstyle]): return False try: return type1 in type2.mro()[:-1] except exceptions.MroError: # The MRO is invalid. 
raise exceptions._NonDeducibleTypeHierarchy def is_subtype(type1, type2): """Check if *type1* is a subtype of *typ2*.""" return _type_check(type2, type1) def is_supertype(type1, type2): """Check if *type2* is a supertype of *type1*.""" return _type_check(type1, type2) def class_instance_as_index(node): """Get the value as an index for the given instance. If an instance provides an __index__ method, then it can be used in some scenarios where an integer is expected, for instance when multiplying or subscripting a list. """ context = contextmod.InferenceContext() context.callcontext = contextmod.CallContext(args=[node]) try: for inferred in node.igetattr('__index__', context=context): if not isinstance(inferred, bases.BoundMethod): continue for result in inferred.infer_call_result(node, context=context): if (isinstance(result, nodes.Const) and isinstance(result.value, int)): return result except exceptions.InferenceError: pass return None def object_len(node, context=None): """Infer length of given node object :param Union[nodes.ClassDef, nodes.Instance] node: :param node: Node to infer length of :raises AstroidTypeError: If an invalid node is returned from __len__ method or no __len__ method exists :raises InferenceError: If the given node cannot be inferred or if multiple nodes are inferred :rtype int: Integer length of node """ from astroid.objects import FrozenSet inferred_node = safe_infer(node, context=context) if inferred_node is None or inferred_node is util.Uninferable: raise exceptions.InferenceError(node=node) if (isinstance(inferred_node, nodes.Const) and isinstance(inferred_node.value, (bytes, str))): return len(inferred_node.value) if isinstance(inferred_node, (nodes.List, nodes.Set, nodes.Tuple, FrozenSet)): return len(inferred_node.elts) if isinstance(inferred_node, nodes.Dict): return len(inferred_node.items) try: node_type = object_type(inferred_node, context=context) len_call = next(node_type.igetattr("__len__", context=context)) except exceptions.AttributeInferenceError: raise exceptions.AstroidTypeError( "object of type '{}' has no len()" .format(len_call.pytype())) try: result_of_len = next(len_call.infer_call_result(node, context)) # Remove StopIteration catch when #507 is fixed except StopIteration: raise exceptions.InferenceError(node=node) if isinstance(result_of_len, nodes.Const) and result_of_len.pytype() == "builtins.int": return result_of_len.value raise exceptions.AstroidTypeError( "'{}' object cannot be interpreted as an integer" .format(result_of_len)) astroid-2.0.1/astroid/inference.py0000644000076500000240000007736013324063433017723 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2012 FELD Boris # Copyright (c) 2013-2014 Google, Inc. 
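# --- Editor's note: an illustrative usage sketch for the helper functions
# above (``object_type``, ``safe_infer`` and ``object_len``).  This snippet is
# not part of the upstream sources; it assumes astroid is importable and the
# ``_example_*`` names are purely illustrative.
import astroid
from astroid import helpers, util

_example_node = astroid.extract_node("[1, 2, 3]")
_example_type = helpers.object_type(_example_node)    # ClassDef for ``list`` or Uninferable
if _example_type is not util.Uninferable:
    print(_example_type.name)                         # expected: 'list'
print(helpers.safe_infer(_example_node))              # the List node itself, or None on ambiguity
print(helpers.object_len(_example_node))              # expected: 3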
# Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Dmitry Pribysh # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2017 Michał Masłowski # Copyright (c) 2017 Calen Pennington # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 Ashley Whetter # Copyright (c) 2018 HoverHell # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """this module contains a set of functions to handle inference on astroid trees """ import functools import itertools import operator from astroid import bases from astroid import context as contextmod from astroid import exceptions from astroid import decorators from astroid import helpers from astroid import manager from astroid import nodes from astroid.interpreter import dunder_lookup from astroid import protocols from astroid import util MANAGER = manager.AstroidManager() # .infer method ############################################################### def infer_end(self, context=None): """inference's end for node such as Module, ClassDef, FunctionDef, Const... """ yield self nodes.Module._infer = infer_end nodes.ClassDef._infer = infer_end nodes.FunctionDef._infer = infer_end nodes.Lambda._infer = infer_end nodes.Const._infer = infer_end nodes.Slice._infer = infer_end def infer_seq(self, context=None): if not any(isinstance(e, nodes.Starred) for e in self.elts): yield self else: values = _infer_seq(self, context) new_seq = type(self)(self.lineno, self.col_offset, self.parent) new_seq.postinit(values) yield new_seq def _infer_seq(node, context=None): """Infer all values based on _BaseContainer.elts""" values = [] for elt in node.elts: if isinstance(elt, nodes.Starred): starred = helpers.safe_infer(elt.value, context) if starred in (None, util.Uninferable): raise exceptions.InferenceError(node=node, context=context) if not hasattr(starred, 'elts'): raise exceptions.InferenceError(node=node, context=context) values.extend(_infer_seq(starred)) else: values.append(elt) return values nodes.List._infer = infer_seq nodes.Tuple._infer = infer_seq nodes.Set._infer = infer_seq def infer_map(self, context=None): if not any(isinstance(k, nodes.DictUnpack) for k, _ in self.items): yield self else: items = _infer_map(self, context) new_seq = type(self)(self.lineno, self.col_offset, self.parent) new_seq.postinit(list(items.items())) yield new_seq def _update_with_replacement(lhs_dict, rhs_dict): """Delete nodes that equate to duplicate keys Since an astroid node doesn't 'equal' another node with the same value, this function uses the as_string method to make sure duplicate keys don't get through Note that both the key and the value are astroid nodes Fixes issue with DictUnpack causing duplicte keys in inferred Dict items :param dict(nodes.NodeNG, nodes.NodeNG) lhs_dict: Dictionary to 'merge' nodes into :param dict(nodes.NodeNG, nodes.NodeNG) rhs_dict: Dictionary with nodes to pull from :return dict(nodes.NodeNG, nodes.NodeNG): merged dictionary of nodes """ combined_dict = itertools.chain(lhs_dict.items(), rhs_dict.items()) # Overwrite keys which have the same string values string_map = {key.as_string(): (key, value) for key, value in combined_dict} # Return to dictionary return dict(string_map.values()) def _infer_map(node, context): """Infer all values based on Dict.items""" values = {} for name, value in node.items: if 
isinstance(name, nodes.DictUnpack): double_starred = helpers.safe_infer(value, context) if double_starred in (None, util.Uninferable): raise exceptions.InferenceError if not isinstance(double_starred, nodes.Dict): raise exceptions.InferenceError(node=node, context=context) unpack_items = _infer_map(double_starred, context) values = _update_with_replacement(values, unpack_items) else: key = helpers.safe_infer(name, context=context) value = helpers.safe_infer(value, context=context) if any(elem in (None, util.Uninferable) for elem in (key, value)): raise exceptions.InferenceError(node=node, context=context) values = _update_with_replacement(values, {key: value}) return values nodes.Dict._infer = infer_map def _higher_function_scope(node): """ Search for the first function which encloses the given scope. This can be used for looking up in that function's scope, in case looking up in a lower scope for a particular name fails. :param node: A scope node. :returns: ``None``, if no parent function scope was found, otherwise an instance of :class:`astroid.scoped_nodes.Function`, which encloses the given node. """ current = node while current.parent and not isinstance(current.parent, nodes.FunctionDef): current = current.parent if current and current.parent: return current.parent return None def infer_name(self, context=None): """infer a Name: use name lookup rules""" frame, stmts = self.lookup(self.name) if not stmts: # Try to see if the name is enclosed in a nested function # and use the higher (first function) scope for searching. parent_function = _higher_function_scope(self.scope()) if parent_function: _, stmts = parent_function.lookup(self.name) if not stmts: raise exceptions.NameInferenceError(name=self.name, scope=self.scope(), context=context) context = context.clone() context.lookupname = self.name return bases._infer_stmts(stmts, context, frame) nodes.Name._infer = decorators.path_wrapper(infer_name) nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper @decorators.raise_if_nothing_inferred @decorators.path_wrapper def infer_call(self, context=None): """infer a Call node by trying to guess what the function returns""" callcontext = context.clone() callcontext.callcontext = contextmod.CallContext(args=self.args, keywords=self.keywords) callcontext.boundnode = None extra_context = {} if context is not None: extra_context = _populate_context_lookup(self, context.clone()) callcontext.extra_context = extra_context for callee in self.func.infer(context): if callee is util.Uninferable: yield callee continue try: if hasattr(callee, 'infer_call_result'): yield from callee.infer_call_result( caller=self, context=callcontext, ) except exceptions.InferenceError: ## XXX log error ? continue # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. 
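# --- Editor's note: a small, hypothetical sketch of the container and name
# inference wired up above; the ``#@`` marker is how ``extract_node`` selects
# a statement.  A dict literal using ``**`` unpacking infers to a plain Dict
# with the unpacked items merged in.
import astroid

_example_name = astroid.extract_node("""
base = {'a': 1}
merged = {**base, 'b': 2}
merged  #@
""")
_example_dict = next(_example_name.infer())
print([key.value for key, _ in _example_dict.items])  # expected: ['a', 'b']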
return dict(node=self, context=context) nodes.Call._infer = infer_call @decorators.path_wrapper def infer_import(self, context=None, asname=True): """infer an Import node: return the imported module/object""" name = context.lookupname if name is None: raise exceptions.InferenceError(node=self, context=context) try: if asname: yield self.do_import_module(self.real_name(name)) else: yield self.do_import_module(name) except exceptions.AstroidBuildingError as exc: raise exceptions.InferenceError( node=self, context=context, ) from exc nodes.Import._infer = infer_import def infer_name_module(self, name): context = contextmod.InferenceContext() context.lookupname = name return self.infer(context, asname=False) nodes.Import.infer_name_module = infer_name_module @decorators.path_wrapper def infer_import_from(self, context=None, asname=True): """infer a ImportFrom node: return the imported module/object""" name = context.lookupname if name is None: raise exceptions.InferenceError(node=self, context=context) if asname: name = self.real_name(name) try: module = self.do_import_module() except exceptions.AstroidBuildingError as exc: raise exceptions.InferenceError( node=self, context=context, ) from exc try: context = contextmod.copy_context(context) context.lookupname = name stmts = module.getattr(name, ignore_locals=module is self.root()) return bases._infer_stmts(stmts, context) except exceptions.AttributeInferenceError as error: raise exceptions.InferenceError( error.message, target=self, attribute=name, context=context, ) from error nodes.ImportFrom._infer = infer_import_from @decorators.raise_if_nothing_inferred def infer_attribute(self, context=None): """infer an Attribute node by using getattr on the associated object""" for owner in self.expr.infer(context): if owner is util.Uninferable: yield owner continue if context and context.boundnode: # This handles the situation where the attribute is accessed through a subclass # of a base class and the attribute is defined at the base class's level, # by taking in consideration a redefinition in the subclass. if (isinstance(owner, bases.Instance) and isinstance(context.boundnode, bases.Instance)): try: if helpers.is_subtype(helpers.object_type(context.boundnode), helpers.object_type(owner)): owner = context.boundnode except exceptions._NonDeducibleTypeHierarchy: # Can't determine anything useful. pass try: context.boundnode = owner yield from owner.igetattr(self.attrname, context) context.boundnode = None except (exceptions.AttributeInferenceError, exceptions.InferenceError): context.boundnode = None except AttributeError: # XXX method / function context.boundnode = None # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. 
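# --- Editor's note: a short, hypothetical sketch of Call inference as hooked
# up above (``nodes.Call._infer``): the callee is inferred first and then
# asked for ``infer_call_result``.
import astroid

_example_call = astroid.extract_node("""
def answer():
    return 42
answer()  #@
""")
print(next(_example_call.infer()).value)  # expected: 42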
return dict(node=self, context=context) nodes.Attribute._infer = decorators.path_wrapper(infer_attribute) nodes.AssignAttr.infer_lhs = infer_attribute # # won't work with a path wrapper @decorators.path_wrapper def infer_global(self, context=None): if context.lookupname is None: raise exceptions.InferenceError(node=self, context=context) try: return bases._infer_stmts(self.root().getattr(context.lookupname), context) except exceptions.AttributeInferenceError as error: raise exceptions.InferenceError( error.message, target=self, attribute=context.lookupname, context=context, ) from error nodes.Global._infer = infer_global _SUBSCRIPT_SENTINEL = object() @decorators.raise_if_nothing_inferred def infer_subscript(self, context=None): """Inference for subscripts We're understanding if the index is a Const or a slice, passing the result of inference to the value's `getitem` method, which should handle each supported index type accordingly. """ try: value = next(self.value.infer(context)) except StopIteration: return None if value is util.Uninferable: yield util.Uninferable return None try: index = next(self.slice.infer(context)) except StopIteration: return None if index is util.Uninferable: yield util.Uninferable return None # Try to deduce the index value. index_value = _SUBSCRIPT_SENTINEL if value.__class__ == bases.Instance: index_value = index else: if index.__class__ == bases.Instance: instance_as_index = helpers.class_instance_as_index(index) if instance_as_index: index_value = instance_as_index else: index_value = index if index_value is _SUBSCRIPT_SENTINEL: raise exceptions.InferenceError(node=self, context=context) try: assigned = value.getitem(index_value, context) except (exceptions.AstroidTypeError, exceptions.AstroidIndexError, exceptions.AttributeInferenceError, AttributeError) as exc: raise exceptions.InferenceError(node=self, context=context) from exc # Prevent inferring if the inferred subscript # is the same as the original subscripted object. if self is assigned or assigned is util.Uninferable: yield util.Uninferable return None yield from assigned.infer(context) # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. return dict(node=self, context=context) nodes.Subscript._infer = decorators.path_wrapper(infer_subscript) nodes.Subscript.infer_lhs = infer_subscript @decorators.raise_if_nothing_inferred @decorators.path_wrapper def _infer_boolop(self, context=None): """Infer a boolean operation (and / or / not). The function will calculate the boolean operation for all pairs generated through inference for each component node. """ values = self.values if self.op == 'or': predicate = operator.truth else: predicate = operator.not_ try: values = [value.infer(context=context) for value in values] except exceptions.InferenceError: yield util.Uninferable return None for pair in itertools.product(*values): if any(item is util.Uninferable for item in pair): # Can't infer the final result, just yield Uninferable. yield util.Uninferable continue bool_values = [item.bool_value() for item in pair] if any(item is util.Uninferable for item in bool_values): # Can't infer the final result, just yield Uninferable. yield util.Uninferable continue # Since the boolean operations are short circuited operations, # this code yields the first value for which the predicate is True # and if no value respected the predicate, then the last value will # be returned (or Uninferable if there was no last value). 
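# --- Editor's note: an illustrative example of the subscript inference above;
# the index is inferred first and then handed to the subscripted value's
# ``getitem`` method.  Hypothetical snippet, not upstream code.
import astroid

_example_sub = astroid.extract_node("('a', 'b', 'c')[1]")
print(next(_example_sub.infer()).value)  # expected: 'b'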
# This is conforming to the semantics of `and` and `or`: # 1 and 0 -> 1 # 0 and 1 -> 0 # 1 or 0 -> 1 # 0 or 1 -> 1 value = util.Uninferable for value, bool_value in zip(pair, bool_values): if predicate(bool_value): yield value break else: yield value # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. return dict(node=self, context=context) nodes.BoolOp._infer = _infer_boolop # UnaryOp, BinOp and AugAssign inferences def _filter_operation_errors(self, infer_callable, context, error): for result in infer_callable(self, context): if isinstance(result, error): # For the sake of .infer(), we don't care about operation # errors, which is the job of pylint. So return something # which shows that we can't infer the result. yield util.Uninferable else: yield result def _infer_unaryop(self, context=None): """Infer what an UnaryOp should return when evaluated.""" for operand in self.operand.infer(context): try: yield operand.infer_unary_op(self.op) except TypeError as exc: # The operand doesn't support this operation. yield util.BadUnaryOperationMessage(operand, self.op, exc) except AttributeError as exc: meth = protocols.UNARY_OP_METHOD[self.op] if meth is None: # `not node`. Determine node's boolean # value and negate its result, unless it is # Uninferable, which will be returned as is. bool_value = operand.bool_value() if bool_value is not util.Uninferable: yield nodes.const_factory(not bool_value) else: yield util.Uninferable else: if not isinstance(operand, (bases.Instance, nodes.ClassDef)): # The operation was used on something which # doesn't support it. yield util.BadUnaryOperationMessage(operand, self.op, exc) continue try: try: methods = dunder_lookup.lookup(operand, meth) except exceptions.AttributeInferenceError: yield util.BadUnaryOperationMessage(operand, self.op, exc) continue meth = methods[0] inferred = next(meth.infer(context=context)) if inferred is util.Uninferable or not inferred.callable(): continue context = contextmod.copy_context(context) context.callcontext = contextmod.CallContext(args=[operand]) call_results = inferred.infer_call_result(self, context=context) result = next(call_results, None) if result is None: # Failed to infer, return the same type. yield operand else: yield result except exceptions.AttributeInferenceError as exc: # The unary operation special method was not found. yield util.BadUnaryOperationMessage(operand, self.op, exc) except exceptions.InferenceError: yield util.Uninferable @decorators.raise_if_nothing_inferred @decorators.path_wrapper def infer_unaryop(self, context=None): """Infer what an UnaryOp should return when evaluated.""" yield from _filter_operation_errors(self, _infer_unaryop, context, util.BadUnaryOperationMessage) # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. 
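# --- Editor's note: a hypothetical check of the short-circuit semantics the
# comment above describes.  For the record, ``1 and 0`` evaluates to ``0`` in
# Python, and the inference mirrors that.
import astroid

print(next(astroid.extract_node("1 and 0").infer()).value)  # expected: 0
print(next(astroid.extract_node("0 or 1").infer()).value)   # expected: 1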
return dict(node=self, context=context) nodes.UnaryOp._infer_unaryop = _infer_unaryop nodes.UnaryOp._infer = infer_unaryop def _is_not_implemented(const): """Check if the given const node is NotImplemented.""" return isinstance(const, nodes.Const) and const.value is NotImplemented def _invoke_binop_inference(instance, opnode, op, other, context, method_name): """Invoke binary operation inference on the given instance.""" methods = dunder_lookup.lookup(instance, method_name) context = contextmod.bind_context_to_node(context, instance) method = methods[0] inferred = next(method.infer(context=context)) if inferred is util.Uninferable: raise exceptions.InferenceError return instance.infer_binary_op(opnode, op, other, context, inferred) def _aug_op(instance, opnode, op, other, context, reverse=False): """Get an inference callable for an augmented binary operation.""" method_name = protocols.AUGMENTED_OP_METHOD[op] return functools.partial(_invoke_binop_inference, instance=instance, op=op, opnode=opnode, other=other, context=context, method_name=method_name) def _bin_op(instance, opnode, op, other, context, reverse=False): """Get an inference callable for a normal binary operation. If *reverse* is True, then the reflected method will be used instead. """ if reverse: method_name = protocols.REFLECTED_BIN_OP_METHOD[op] else: method_name = protocols.BIN_OP_METHOD[op] return functools.partial(_invoke_binop_inference, instance=instance, op=op, opnode=opnode, other=other, context=context, method_name=method_name) def _get_binop_contexts(context, left, right): """Get contexts for binary operations. This will return two inferrence contexts, the first one for x.__op__(y), the other one for y.__rop__(x), where only the arguments are inversed. """ # The order is important, since the first one should be # left.__op__(right). for arg in (right, left): new_context = context.clone() new_context.callcontext = contextmod.CallContext(args=[arg]) new_context.boundnode = None yield new_context def _same_type(type1, type2): """Check if type1 is the same as type2.""" return type1.qname() == type2.qname() def _get_binop_flow(left, left_type, binary_opnode, right, right_type, context, reverse_context): """Get the flow for binary operations. The rules are a bit messy: * if left and right have the same type, then only one method will be called, left.__op__(right) * if left and right are unrelated typewise, then first left.__op__(right) is tried and if this does not exist or returns NotImplemented, then right.__rop__(left) is tried. * if left is a subtype of right, then only left.__op__(right) is tried. * if left is a supertype of right, then right.__rop__(left) is first tried and then left.__op__(right) """ op = binary_opnode.op if _same_type(left_type, right_type): methods = [_bin_op(left, binary_opnode, op, right, context)] elif helpers.is_subtype(left_type, right_type): methods = [_bin_op(left, binary_opnode, op, right, context)] elif helpers.is_supertype(left_type, right_type): methods = [_bin_op(right, binary_opnode, op, left, reverse_context, reverse=True), _bin_op(left, binary_opnode, op, right, context)] else: methods = [_bin_op(left, binary_opnode, op, right, context), _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True)] return methods def _get_aug_flow(left, left_type, aug_opnode, right, right_type, context, reverse_context): """Get the flow for augmented binary operations. 
The rules are a bit messy: * if left and right have the same type, then left.__augop__(right) is first tried and then left.__op__(right). * if left and right are unrelated typewise, then left.__augop__(right) is tried, then left.__op__(right) is tried and then right.__rop__(left) is tried. * if left is a subtype of right, then left.__augop__(right) is tried and then left.__op__(right). * if left is a supertype of right, then left.__augop__(right) is tried, then right.__rop__(left) and then left.__op__(right) """ bin_op = aug_opnode.op.strip("=") aug_op = aug_opnode.op if _same_type(left_type, right_type): methods = [_aug_op(left, aug_opnode, aug_op, right, context), _bin_op(left, aug_opnode, bin_op, right, context)] elif helpers.is_subtype(left_type, right_type): methods = [_aug_op(left, aug_opnode, aug_op, right, context), _bin_op(left, aug_opnode, bin_op, right, context)] elif helpers.is_supertype(left_type, right_type): methods = [_aug_op(left, aug_opnode, aug_op, right, context), _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True), _bin_op(left, aug_opnode, bin_op, right, context)] else: methods = [_aug_op(left, aug_opnode, aug_op, right, context), _bin_op(left, aug_opnode, bin_op, right, context), _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True)] return methods def _infer_binary_operation(left, right, binary_opnode, context, flow_factory): """Infer a binary operation between a left operand and a right operand This is used by both normal binary operations and augmented binary operations, the only difference is the flow factory used. """ context, reverse_context = _get_binop_contexts(context, left, right) left_type = helpers.object_type(left) right_type = helpers.object_type(right) methods = flow_factory(left, left_type, binary_opnode, right, right_type, context, reverse_context) for method in methods: try: results = list(method()) except AttributeError: continue except exceptions.AttributeInferenceError: continue except exceptions.InferenceError: yield util.Uninferable return else: if any(result is util.Uninferable for result in results): yield util.Uninferable return if all(map(_is_not_implemented, results)): continue not_implemented = sum(1 for result in results if _is_not_implemented(result)) if not_implemented and not_implemented != len(results): # Can't infer yet what this is. yield util.Uninferable return for result in results: yield result return # The operation doesn't seem to be supported so let the caller know about it yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type) def _infer_binop(self, context): """Binary operation inferrence logic.""" if context is None: context = contextmod.InferenceContext() left = self.left right = self.right # we use two separate contexts for evaluating lhs and rhs because # 1. evaluating lhs may leave some undesired entries in context.path # which may not let us infer right value of rhs lhs_context = context.clone() rhs_context = context.clone() for lhs in left.infer(context=lhs_context): if lhs is util.Uninferable: # Don't know how to process this. yield util.Uninferable return for rhs in right.infer(context=rhs_context): if rhs is util.Uninferable: # Don't know how to process this. 
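# --- Editor's note: a minimal, hypothetical illustration of the binary
# operation flow described above: the dunder / reflected-dunder candidates are
# tried in order and the first conclusive result is yielded.
import astroid

print(next(astroid.extract_node("2 ** 8").infer()).value)    # expected: 256
print(next(astroid.extract_node("'ab' * 3").infer()).value)  # expected: 'ababab'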
yield util.Uninferable return try: yield from _infer_binary_operation( lhs, rhs, self, context, _get_binop_flow) except exceptions._NonDeducibleTypeHierarchy: yield util.Uninferable @decorators.yes_if_nothing_inferred @decorators.path_wrapper def infer_binop(self, context=None): return _filter_operation_errors(self, _infer_binop, context, util.BadBinaryOperationMessage) nodes.BinOp._infer_binop = _infer_binop nodes.BinOp._infer = infer_binop def _infer_augassign(self, context=None): """Inference logic for augmented binary operations.""" if context is None: context = contextmod.InferenceContext() for lhs in self.target.infer_lhs(context=context): if lhs is util.Uninferable: # Don't know how to process this. yield util.Uninferable return rhs_context = context.clone() for rhs in self.value.infer(context=rhs_context): if rhs is util.Uninferable: # Don't know how to process this. yield util.Uninferable return try: yield from _infer_binary_operation(lhs, rhs, self, context, _get_aug_flow) except exceptions._NonDeducibleTypeHierarchy: yield util.Uninferable @decorators.path_wrapper def infer_augassign(self, context=None): return _filter_operation_errors(self, _infer_augassign, context, util.BadBinaryOperationMessage) nodes.AugAssign._infer_augassign = _infer_augassign nodes.AugAssign._infer = infer_augassign # End of binary operation inference. def infer_arguments(self, context=None): name = context.lookupname if name is None: raise exceptions.InferenceError(node=self, context=context) return protocols._arguments_infer_argname(self, name, context) nodes.Arguments._infer = infer_arguments @decorators.path_wrapper def infer_assign(self, context=None): """infer a AssignName/AssignAttr: need to inspect the RHS part of the assign node """ stmt = self.statement() if isinstance(stmt, nodes.AugAssign): return stmt.infer(context) stmts = list(self.assigned_stmts(context=context)) return bases._infer_stmts(stmts, context) nodes.AssignName._infer = infer_assign nodes.AssignAttr._infer = infer_assign # no infer method on DelName and DelAttr (expected InferenceError) @decorators.path_wrapper def infer_empty_node(self, context=None): if not self.has_underlying_object(): yield util.Uninferable else: try: yield from MANAGER.infer_ast_from_something(self.object, context=context) except exceptions.AstroidError: yield util.Uninferable nodes.EmptyNode._infer = infer_empty_node def infer_index(self, context=None): return self.value.infer(context) nodes.Index._infer = infer_index # TODO: move directly into bases.Instance when the dependency hell # will be solved. def instance_getitem(self, index, context=None): # Rewrap index to Const for this case new_context = contextmod.bind_context_to_node(context, self) if not context: context = new_context # Create a new callcontext for providing index as an argument. 
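# --- Editor's note: a hypothetical illustration of instance subscripting as
# handled by ``instance_getitem`` (whose body continues below): the instance's
# ``__getitem__`` is looked up and called with the inferred index.
import astroid

_example_getitem = astroid.extract_node("""
class Squares:
    def __getitem__(self, index):
        return index * index
Squares()[4]  #@
""")
print(next(_example_getitem.infer()).value)  # expected: 16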
new_context.callcontext = contextmod.CallContext(args=[index]) method = next(self.igetattr('__getitem__', context=context), None) if not isinstance(method, bases.BoundMethod): raise exceptions.InferenceError( 'Could not find __getitem__ for {node!r}.', node=self, context=context) try: return next(method.infer_call_result(self, new_context)) except StopIteration as exc: raise exceptions.InferenceError( message='Inference for {node!r}[{index!s}] failed.', node=self, index=index, context=context) from exc bases.Instance.getitem = instance_getitem def _populate_context_lookup(call, context): # Allows context to be saved for later # for inference inside a function context_lookup = {} if context is None: return context_lookup for arg in call.args: if isinstance(arg, nodes.Starred): context_lookup[arg.value] = context else: context_lookup[arg] = context keywords = call.keywords if call.keywords is not None else [] for keyword in keywords: context_lookup[keyword.value] = context return context_lookup astroid-2.0.1/astroid/interpreter/0000755000076500000240000000000013324065077017747 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/interpreter/__init__.py0000644000076500000240000000000013324063433022040 0ustar claudiustaff00000000000000astroid-2.0.1/astroid/interpreter/_import/0000755000076500000240000000000013324065077021420 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/interpreter/_import/__init__.py0000644000076500000240000000000013324063433023511 0ustar claudiustaff00000000000000astroid-2.0.1/astroid/interpreter/_import/spec.py0000644000076500000240000002535013324063433022723 0ustar claudiustaff00000000000000# Copyright (c) 2016-2018 Claudiu Popa # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2017 Chris Philip # Copyright (c) 2017 Hugo # Copyright (c) 2017 ioanatia # Copyright (c) 2017 Calen Pennington # Copyright (c) 2018 Nick Drozd import abc import collections import enum import imp import os import sys import zipimport try: import importlib.machinery _HAS_MACHINERY = True except ImportError: _HAS_MACHINERY = False try: from functools import lru_cache except ImportError: from backports.functools_lru_cache import lru_cache from . import util ModuleType = enum.Enum('ModuleType', 'C_BUILTIN C_EXTENSION PKG_DIRECTORY ' 'PY_CODERESOURCE PY_COMPILED PY_FROZEN PY_RESOURCE ' 'PY_SOURCE PY_ZIPMODULE PY_NAMESPACE') _ImpTypes = {imp.C_BUILTIN: ModuleType.C_BUILTIN, imp.C_EXTENSION: ModuleType.C_EXTENSION, imp.PKG_DIRECTORY: ModuleType.PKG_DIRECTORY, imp.PY_COMPILED: ModuleType.PY_COMPILED, imp.PY_FROZEN: ModuleType.PY_FROZEN, imp.PY_SOURCE: ModuleType.PY_SOURCE, } if hasattr(imp, 'PY_RESOURCE'): _ImpTypes[imp.PY_RESOURCE] = ModuleType.PY_RESOURCE if hasattr(imp, 'PY_CODERESOURCE'): _ImpTypes[imp.PY_CODERESOURCE] = ModuleType.PY_CODERESOURCE def _imp_type_to_module_type(imp_type): return _ImpTypes[imp_type] _ModuleSpec = collections.namedtuple('_ModuleSpec', 'name type location ' 'origin submodule_search_locations') class ModuleSpec(_ModuleSpec): """Defines a class similar to PEP 420's ModuleSpec A module spec defines a name of a module, its type, location and where submodules can be found, if the module is a package. 
""" def __new__(cls, name, module_type, location=None, origin=None, submodule_search_locations=None): return _ModuleSpec.__new__(cls, name=name, type=module_type, location=location, origin=origin, submodule_search_locations=submodule_search_locations) class Finder: """A finder is a class which knows how to find a particular module.""" def __init__(self, path=None): self._path = path or sys.path @abc.abstractmethod def find_module(self, modname, module_parts, processed, submodule_path): """Find the given module Each finder is responsible for each protocol of finding, as long as they all return a ModuleSpec. :param str modname: The module which needs to be searched. :param list module_parts: It should be a list of strings, where each part contributes to the module's namespace. :param list processed: What parts from the module parts were processed so far. :param list submodule_path: A list of paths where the module can be looked into. :returns: A ModuleSpec, describing how and where the module was found, None, otherwise. """ def contribute_to_path(self, spec, processed): """Get a list of extra paths where this finder can search.""" class ImpFinder(Finder): """A finder based on the imp module.""" def find_module(self, modname, module_parts, processed, submodule_path): if submodule_path is not None: submodule_path = list(submodule_path) try: stream, mp_filename, mp_desc = imp.find_module(modname, submodule_path) except ImportError: return None # Close resources. if stream: stream.close() return ModuleSpec(name=modname, location=mp_filename, module_type=_imp_type_to_module_type(mp_desc[2])) def contribute_to_path(self, spec, processed): if spec.location is None: # Builtin. return None if _is_setuptools_namespace(spec.location): # extend_path is called, search sys.path for module/packages # of this name see pkgutil.extend_path documentation path = [os.path.join(p, *processed) for p in sys.path if os.path.isdir(os.path.join(p, *processed))] else: path = [spec.location] return path class ExplicitNamespacePackageFinder(ImpFinder): """A finder for the explicit namespace packages, generated through pkg_resources.""" def find_module(self, modname, module_parts, processed, submodule_path): if processed: modname = '.'.join(processed + [modname]) if util.is_namespace(modname) and modname in sys.modules: submodule_path = sys.modules[modname].__path__ return ModuleSpec(name=modname, location='', origin='namespace', module_type=ModuleType.PY_NAMESPACE, submodule_search_locations=submodule_path) return None def contribute_to_path(self, spec, processed): return spec.submodule_search_locations class ZipFinder(Finder): """Finder that knows how to find a module inside zip files.""" def __init__(self, path): super(ZipFinder, self).__init__(path) self._zipimporters = _precache_zipimporters(path) def find_module(self, modname, module_parts, processed, submodule_path): try: file_type, filename, path = _search_zip(module_parts, self._zipimporters) except ImportError: return None return ModuleSpec(name=modname, location=filename, origin='egg', module_type=file_type, submodule_search_locations=path) class PathSpecFinder(Finder): """Finder based on importlib.machinery.PathFinder.""" def find_module(self, modname, module_parts, processed, submodule_path): spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path) if spec: # origin can be either a string on older Python versions # or None in case it is a namespace package: # https://github.com/python/cpython/pull/5481 is_namespace_pkg = spec.origin in 
('namespace', None) location = spec.origin if not is_namespace_pkg else None module_type = ModuleType.PY_NAMESPACE if is_namespace_pkg else None spec = ModuleSpec(name=spec.name, location=location, origin=spec.origin, module_type=module_type, submodule_search_locations=list(spec.submodule_search_locations or [])) return spec def contribute_to_path(self, spec, processed): if spec.type == ModuleType.PY_NAMESPACE: return spec.submodule_search_locations return None _SPEC_FINDERS = ( ImpFinder, ZipFinder, ) if _HAS_MACHINERY and sys.version_info[:2] >= (3, 4): _SPEC_FINDERS += (PathSpecFinder, ) _SPEC_FINDERS += (ExplicitNamespacePackageFinder, ) def _is_setuptools_namespace(location): try: with open(os.path.join(location, '__init__.py'), 'rb') as stream: data = stream.read(4096) except IOError: pass else: extend_path = b'pkgutil' in data and b'extend_path' in data declare_namespace = ( b"pkg_resources" in data and b"declare_namespace(__name__)" in data) return extend_path or declare_namespace @lru_cache() def _cached_set_diff(left, right): result = set(left) result.difference_update(right) return result def _precache_zipimporters(path=None): pic = sys.path_importer_cache # When measured, despite having the same complexity (O(n)), # converting to tuples and then caching the conversion to sets # and the set difference is faster than converting to sets # and then only caching the set difference. req_paths = tuple(path or sys.path) cached_paths = tuple(pic) new_paths = _cached_set_diff(req_paths, cached_paths) for entry_path in new_paths: try: pic[entry_path] = zipimport.zipimporter(entry_path) except zipimport.ZipImportError: continue return pic def _search_zip(modpath, pic): for filepath, importer in list(pic.items()): if importer is not None: found = importer.find_module(modpath[0]) if found: if not importer.find_module(os.path.sep.join(modpath)): raise ImportError('No module named %s in %s/%s' % ( '.'.join(modpath[1:]), filepath, modpath)) #import code; code.interact(local=locals()) return (ModuleType.PY_ZIPMODULE, os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), filepath) raise ImportError('No module named %s' % '.'.join(modpath)) def _find_spec_with_path(search_path, modname, module_parts, processed, submodule_path): finders = [finder(search_path) for finder in _SPEC_FINDERS] for finder in finders: spec = finder.find_module(modname, module_parts, processed, submodule_path) if spec is None: continue return finder, spec raise ImportError('No module named %s' % '.'.join(module_parts)) def find_spec(modpath, path=None): """Find a spec for the given module. :type modpath: list or tuple :param modpath: split module's name (i.e name of a module or package split on '.'), with leading empty strings for explicit relative import :type path: list or None :param path: optional list of path where the module or package should be searched (use sys.path if nothing or None is given) :rtype: ModuleSpec :return: A module spec, which describes how the module was found and where. """ _path = path or sys.path # Need a copy for not mutating the argument. 
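# --- Editor's note: an illustrative, hypothetical call of ``find_spec``
# (whose body continues below).  The exact location and module type depend on
# the interpreter and the paths searched.
from astroid.interpreter._import import spec as _spec_mod

_example_spec = _spec_mod.find_spec(['logging', 'handlers'])
print(_example_spec.name)  # expected: 'handlers'
print(_example_spec.type)  # typically ModuleType.PY_SOURCE on a CPython install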
modpath = modpath[:] submodule_path = None module_parts = modpath[:] processed = [] while modpath: modname = modpath.pop(0) finder, spec = _find_spec_with_path(_path, modname, module_parts, processed, submodule_path or path) processed.append(modname) if modpath: submodule_path = finder.contribute_to_path(spec, processed) if spec.type == ModuleType.PKG_DIRECTORY: spec = spec._replace(submodule_search_locations=submodule_path) return spec astroid-2.0.1/astroid/interpreter/_import/util.py0000644000076500000240000000041513324063433022741 0ustar claudiustaff00000000000000# Copyright (c) 2016, 2018 Claudiu Popa try: import pkg_resources except ImportError: pkg_resources = None def is_namespace(modname): return (pkg_resources is not None and modname in pkg_resources._namespace_packages) astroid-2.0.1/astroid/interpreter/dunder_lookup.py0000644000076500000240000000450513324063433023171 0ustar claudiustaff00000000000000# Copyright (c) 2016-2018 Claudiu Popa # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Contains logic for retrieving special methods. This implementation does not rely on the dot attribute access logic, found in ``.getattr()``. The difference between these two is that the dunder methods are looked with the type slots (you can find more about these here http://lucumr.pocoo.org/2014/8/16/the-python-i-would-like-to-see/) As such, the lookup for the special methods is actually simpler than the dot attribute access. """ import itertools import astroid from astroid import exceptions def _lookup_in_mro(node, name): attrs = node.locals.get(name, []) nodes = itertools.chain.from_iterable( ancestor.locals.get(name, []) for ancestor in node.ancestors(recurs=True) ) values = list(itertools.chain(attrs, nodes)) if not values: raise exceptions.AttributeInferenceError( attribute=name, target=node ) return values def lookup(node, name): """Lookup the given special method name in the given *node* If the special method was found, then a list of attributes will be returned. Otherwise, `astroid.AttributeInferenceError` is going to be raised. """ if isinstance(node, (astroid.List, astroid.Tuple, astroid.Const, astroid.Dict, astroid.Set)): return _builtin_lookup(node, name) if isinstance(node, astroid.Instance): return _lookup_in_mro(node, name) if isinstance(node, astroid.ClassDef): return _class_lookup(node, name) raise exceptions.AttributeInferenceError( attribute=name, target=node ) def _class_lookup(node, name): metaclass = node.metaclass() if metaclass is None: raise exceptions.AttributeInferenceError( attribute=name, target=node ) return _lookup_in_mro(metaclass, name) def _builtin_lookup(node, name): values = node.locals.get(name, []) if not values: raise exceptions.AttributeInferenceError( attribute=name, target=node ) return values astroid-2.0.1/astroid/interpreter/objectmodel.py0000644000076500000240000005064613324063433022615 0ustar claudiustaff00000000000000# Copyright (c) 2016-2018 Claudiu Popa # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2017-2018 Bryce Guinta # Copyright (c) 2017 Ceridwen # Copyright (c) 2017 Calen Pennington # Copyright (c) 2018 Nick Drozd # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """ Data object model, as per https://docs.python.org/3/reference/datamodel.html. 
This module describes, at least partially, a data object model for some of astroid's nodes. The model contains special attributes that nodes such as functions, classes, modules etc have, such as __doc__, __class__, __module__ etc, being used when doing attribute lookups over nodes. For instance, inferring `obj.__class__` will first trigger an inference of the `obj` variable. If it was succesfully inferred, then an attribute `__class__ will be looked for in the inferred object. This is the part where the data model occurs. The model is attached to those nodes and the lookup mechanism will try to see if attributes such as `__class__` are defined by the model or not. If they are defined, the model will be requested to return the corresponding value of that attribute. Thus the model can be viewed as a special part of the lookup mechanism. """ import builtins import itertools import pprint import os import types from functools import lru_cache import astroid from astroid import context as contextmod from astroid import exceptions from astroid import node_classes def _dunder_dict(instance, attributes): obj = node_classes.Dict(parent=instance) # Convert the keys to node strings keys = [node_classes.Const(value=value, parent=obj) for value in list(attributes.keys())] # The original attribute has a list of elements for each key, # but that is not useful for retrieving the special attribute's value. # In this case, we're picking the last value from each list. values = [elem[-1] for elem in attributes.values()] obj.postinit(list(zip(keys, values))) return obj class ObjectModel: def __init__(self): self._instance = None def __repr__(self): result = [] cname = type(self).__name__ string = '%(cname)s(%(fields)s)' alignment = len(cname) + 1 for field in sorted(self.attributes()): width = 80 - len(field) - alignment lines = pprint.pformat(field, indent=2, width=width).splitlines(True) inner = [lines[0]] for line in lines[1:]: inner.append(' ' * alignment + line) result.append(field) return string % {'cname': cname, 'fields': (',\n' + ' ' * alignment).join(result)} def __call__(self, instance): self._instance = instance return self def __get__(self, instance, cls=None): # ObjectModel needs to be a descriptor so that just doing # `special_attributes = SomeObjectModel` should be enough in the body of a node. # But at the same time, node.special_attributes should return an object # which can be used for manipulating the special attributes. That's the reason # we pass the instance through which it got accessed to ObjectModel.__call__, # returning itself afterwards, so we can still have access to the # underlying data model and to the instance for which it got accessed. return self(instance) def __contains__(self, name): return name in self.attributes() @lru_cache(maxsize=None) def attributes(self): """Get the attributes which are exported by this object model.""" return [obj[2:] for obj in dir(self) if obj.startswith('py')] def lookup(self, name): """Look up the given *name* in the current model It should return an AST or an interpreter object, but if the name is not found, then an AttributeInferenceError will be raised. 
""" if name in self.attributes(): return getattr(self, "py" + name) raise exceptions.AttributeInferenceError(target=self._instance, attribute=name) class ModuleModel(ObjectModel): def _builtins(self): builtins_ast_module = astroid.MANAGER.astroid_cache[builtins.__name__] return builtins_ast_module.special_attributes.lookup('__dict__') @property def pybuiltins(self): return self._builtins() # __path__ is a standard attribute on *packages* not # non-package modules. The only mention of it in the # official 2.7 documentation I can find is in the # tutorial. @property def py__path__(self): if not self._instance.package: raise exceptions.AttributeInferenceError(target=self._instance, attribute='__path__') path_objs = [ node_classes.Const( value=path if not path.endswith('__init__.py') else os.path.dirname(path), parent=self._instance ) for path in self._instance.path ] container = node_classes.List(parent=self._instance) container.postinit(path_objs) return container @property def py__name__(self): return node_classes.Const(value=self._instance.name, parent=self._instance) @property def py__doc__(self): return node_classes.Const(value=self._instance.doc, parent=self._instance) @property def py__file__(self): return node_classes.Const(value=self._instance.file, parent=self._instance) @property def py__dict__(self): return _dunder_dict(self._instance, self._instance.globals) # __package__ isn't mentioned anywhere outside a PEP: # https://www.python.org/dev/peps/pep-0366/ @property def py__package__(self): if not self._instance.package: value = '' else: value = self._instance.name return node_classes.Const(value=value, parent=self._instance) # These are related to the Python 3 implementation of the # import system, # https://docs.python.org/3/reference/import.html#import-related-module-attributes @property def py__spec__(self): # No handling for now. return node_classes.Unknown() @property def py__loader__(self): # No handling for now. return node_classes.Unknown() @property def py__cached__(self): # No handling for now. 
return node_classes.Unknown() class FunctionModel(ObjectModel): @property def py__name__(self): return node_classes.Const(value=self._instance.name, parent=self._instance) @property def py__doc__(self): return node_classes.Const(value=self._instance.doc, parent=self._instance) @property def py__qualname__(self): return node_classes.Const(value=self._instance.qname(), parent=self._instance) @property def py__defaults__(self): func = self._instance if not func.args.defaults: return node_classes.Const(value=None, parent=func) defaults_obj = node_classes.Tuple(parent=func) defaults_obj.postinit(func.args.defaults) return defaults_obj @property def py__annotations__(self): obj = node_classes.Dict(parent=self._instance) if not self._instance.returns: returns = None else: returns = self._instance.returns args = self._instance.args pair_annotations = itertools.chain( zip(args.args or [], args.annotations), zip(args.kwonlyargs, args.kwonlyargs_annotations) ) annotations = { arg.name: annotation for (arg, annotation) in pair_annotations if annotation } if args.varargannotation: annotations[args.vararg] = args.varargannotation if args.kwargannotation: annotations[args.kwarg] = args.kwargannotation if returns: annotations['return'] = returns items = [(node_classes.Const(key, parent=obj), value) for (key, value) in annotations.items()] obj.postinit(items) return obj @property def py__dict__(self): return node_classes.Dict(parent=self._instance) py__globals__ = py__dict__ @property def py__kwdefaults__(self): def _default_args(args, parent): for arg in args.kwonlyargs: try: default = args.default_value(arg.name) except exceptions.NoDefault: continue name = node_classes.Const(arg.name, parent=parent) yield name, default args = self._instance.args obj = node_classes.Dict(parent=self._instance) defaults = dict(_default_args(args, obj)) obj.postinit(list(defaults.items())) return obj @property def py__module__(self): return node_classes.Const(self._instance.root().qname()) @property def py__get__(self): from astroid import bases func = self._instance class DescriptorBoundMethod(bases.BoundMethod): """Bound method which knows how to understand calling descriptor binding.""" def implicit_parameters(self): # Different than BoundMethod since the signature # is different. return 0 def infer_call_result(self, caller, context=None): if len(caller.args) != 2: raise exceptions.InferenceError( "Invalid arguments for descriptor binding", target=self, context=context) context = contextmod.copy_context(context) cls = next(caller.args[0].infer(context=context)) if cls is astroid.Uninferable: raise exceptions.InferenceError( "Invalid class inferred", target=self, context=context) # For some reason func is a Node that the below # code is not expecting if isinstance(func, bases.BoundMethod): yield func return # Rebuild the original value, but with the parent set as the # class where it will be bound. new_func = func.__class__(name=func.name, doc=func.doc, lineno=func.lineno, col_offset=func.col_offset, parent=cls) # pylint: disable=no-member new_func.postinit(func.args, func.body, func.decorators, func.returns) # Build a proper bound method that points to our newly built function. 
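# --- Editor's note: a hypothetical example of the function object model in
# action: ``__defaults__`` on a FunctionDef resolves, via FunctionModel above,
# to a Tuple of the default-value nodes.
import astroid

_example_defaults = astroid.extract_node("""
def greet(name, punctuation="!"):
    return name + punctuation
greet.__defaults__  #@
""")
print([d.value for d in next(_example_defaults.infer()).elts])  # expected: ['!']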
proxy = bases.UnboundMethod(new_func) yield bases.BoundMethod(proxy=proxy, bound=cls) @property def args(self): """Overwrite the underlying args to match those of the underlying func Usually the underlying *func* is a function/method, as in: def test(self): pass This has only the *self* parameter but when we access test.__get__ we get a new object which has two parameters, *self* and *type*. """ nonlocal func params = func.args.args.copy() params.append(astroid.AssignName(name='type')) arguments = astroid.Arguments(parent=func.args.parent,) arguments.postinit( args=params, defaults=[], kwonlyargs=[], kw_defaults=[], annotations=[], ) return arguments return DescriptorBoundMethod(proxy=self._instance, bound=self._instance) # These are here just for completion. @property def py__ne__(self): return node_classes.Unknown() py__subclasshook__ = py__ne__ py__str__ = py__ne__ py__sizeof__ = py__ne__ py__setattr__ = py__ne__ py__repr__ = py__ne__ py__reduce__ = py__ne__ py__reduce_ex__ = py__ne__ py__new__ = py__ne__ py__lt__ = py__ne__ py__eq__ = py__ne__ py__gt__ = py__ne__ py__format__ = py__ne__ py__delattr__ = py__ne__ py__getattribute__ = py__ne__ py__hash__ = py__ne__ py__init__ = py__ne__ py__dir__ = py__ne__ py__call__ = py__ne__ py__class__ = py__ne__ py__closure__ = py__ne__ py__code__ = py__ne__ class ClassModel(ObjectModel): @property def py__module__(self): return node_classes.Const(self._instance.root().qname()) @property def py__name__(self): return node_classes.Const(self._instance.name) @property def py__qualname__(self): return node_classes.Const(self._instance.qname()) @property def py__doc__(self): return node_classes.Const(self._instance.doc) @property def py__mro__(self): if not self._instance.newstyle: raise exceptions.AttributeInferenceError(target=self._instance, attribute='__mro__') mro = self._instance.mro() obj = node_classes.Tuple(parent=self._instance) obj.postinit(mro) return obj @property def pymro(self): if not self._instance.newstyle: raise exceptions.AttributeInferenceError(target=self._instance, attribute='mro') from astroid import bases other_self = self # Cls.mro is a method and we need to return one in order to have a proper inference. # The method we're returning is capable of inferring the underlying MRO though. class MroBoundMethod(bases.BoundMethod): def infer_call_result(self, caller, context=None): yield other_self.py__mro__ implicit_metaclass = self._instance.implicit_metaclass() mro_method = implicit_metaclass.locals['mro'][0] return MroBoundMethod(proxy=mro_method, bound=implicit_metaclass) @property def py__bases__(self): obj = node_classes.Tuple() context = contextmod.InferenceContext() elts = list(self._instance._inferred_bases(context)) obj.postinit(elts=elts) return obj @property def py__class__(self): from astroid import helpers return helpers.object_type(self._instance) @property def py__subclasses__(self): """Get the subclasses of the underlying class This looks only in the current module for retrieving the subclasses, thus it might miss a couple of them. 
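# --- Editor's note: an illustrative, hypothetical use of the class object
# model defined above: ``__mro__`` on a ClassDef infers to a Tuple of the
# classes in method resolution order.
import astroid

_example_mro = astroid.extract_node("""
class Base: pass
class Child(Base): pass
Child.__mro__  #@
""")
print([cls.name for cls in next(_example_mro.infer()).elts])  # expected: ['Child', 'Base', 'object']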
""" from astroid import bases from astroid import scoped_nodes if not self._instance.newstyle: raise exceptions.AttributeInferenceError(target=self._instance, attribute='__subclasses__') qname = self._instance.qname() root = self._instance.root() classes = [cls for cls in root.nodes_of_class(scoped_nodes.ClassDef) if cls != self._instance and cls.is_subtype_of(qname)] obj = node_classes.List(parent=self._instance) obj.postinit(classes) class SubclassesBoundMethod(bases.BoundMethod): def infer_call_result(self, caller, context=None): yield obj implicit_metaclass = self._instance.implicit_metaclass() subclasses_method = implicit_metaclass.locals['__subclasses__'][0] return SubclassesBoundMethod(proxy=subclasses_method, bound=implicit_metaclass) @property def py__dict__(self): return node_classes.Dict(parent=self._instance) class SuperModel(ObjectModel): @property def py__thisclass__(self): return self._instance.mro_pointer @property def py__self_class__(self): return self._instance._self_class @property def py__self__(self): return self._instance.type @property def py__class__(self): return self._instance._proxied class UnboundMethodModel(ObjectModel): @property def py__class__(self): from astroid import helpers return helpers.object_type(self._instance) @property def py__func__(self): return self._instance._proxied @property def py__self__(self): return node_classes.Const(value=None, parent=self._instance) pyim_func = py__func__ pyim_class = py__class__ pyim_self = py__self__ class BoundMethodModel(FunctionModel): @property def py__func__(self): return self._instance._proxied._proxied @property def py__self__(self): return self._instance.bound class GeneratorModel(FunctionModel): def __new__(cls, *args, **kwargs): # Append the values from the GeneratorType unto this object. 
ret = super(GeneratorModel, cls).__new__(cls, *args, **kwargs) generator = astroid.MANAGER.astroid_cache[builtins.__name__]['generator'] for name, values in generator.locals.items(): method = values[0] patched = lambda cls, meth=method: meth setattr(type(ret), 'py' + name, property(patched)) return ret @property def py__name__(self): return node_classes.Const(value=self._instance.parent.name, parent=self._instance) @property def py__doc__(self): return node_classes.Const(value=self._instance.parent.doc, parent=self._instance) class InstanceModel(ObjectModel): @property def py__class__(self): return self._instance._proxied @property def py__module__(self): return node_classes.Const(self._instance.root().qname()) @property def py__doc__(self): return node_classes.Const(self._instance.doc) @property def py__dict__(self): return _dunder_dict(self._instance, self._instance.instance_attrs) class ExceptionInstanceModel(InstanceModel): @property def pyargs(self): message = node_classes.Const('') args = node_classes.Tuple(parent=self._instance) args.postinit((message, )) return args @property def py__traceback__(self): builtins_ast_module = astroid.MANAGER.astroid_cache[builtins.__name__] traceback_type = builtins_ast_module[types.TracebackType.__name__] return traceback_type.instantiate_class() class DictModel(ObjectModel): @property def py__class__(self): return self._instance._proxied def _generic_dict_attribute(self, obj, name): """Generate a bound method that can infer the given *obj*.""" class DictMethodBoundMethod(astroid.BoundMethod): def infer_call_result(self, caller, context=None): yield obj meth = next(self._instance._proxied.igetattr(name)) return DictMethodBoundMethod(proxy=meth, bound=self._instance) @property def pyitems(self): elems = [] obj = node_classes.List(parent=self._instance) for key, value in self._instance.items: elem = node_classes.Tuple(parent=obj) elem.postinit((key, value)) elems.append(elem) obj.postinit(elts=elems) from astroid import objects obj = objects.DictItems(obj) return self._generic_dict_attribute(obj, 'items') @property def pykeys(self): keys = [key for (key, _) in self._instance.items] obj = node_classes.List(parent=self._instance) obj.postinit(elts=keys) from astroid import objects obj = objects.DictKeys(obj) return self._generic_dict_attribute(obj, 'keys') @property def pyvalues(self): values = [value for (_, value) in self._instance.items] obj = node_classes.List(parent=self._instance) obj.postinit(values) from astroid import objects obj = objects.DictValues(obj) return self._generic_dict_attribute(obj, 'values') astroid-2.0.1/astroid/manager.py0000644000076500000240000002737013324063433017373 0ustar claudiustaff00000000000000# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 BioGeek # Copyright (c) 2014 Google, Inc. 
# Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2017 Iva Miholic # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 Nick Drozd # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """astroid manager: avoid multiple astroid build of a same module when possible by providing a class responsible to get astroid representation from various source and using a cache of built modules) """ import os import zipimport from astroid import exceptions from astroid.interpreter._import import spec from astroid import modutils from astroid import transforms def safe_repr(obj): try: return repr(obj) except Exception: # pylint: disable=broad-except return '???' class AstroidManager: """the astroid manager, responsible to build astroid from files or modules. Use the Borg pattern. """ name = 'astroid loader' brain = {} def __init__(self): self.__dict__ = AstroidManager.brain if not self.__dict__: # NOTE: cache entries are added by the [re]builder self.astroid_cache = {} self._mod_file_cache = {} self._failed_import_hooks = [] self.always_load_extensions = False self.optimize_ast = False self.extension_package_whitelist = set() self._transform = transforms.TransformVisitor() # Export these APIs for convenience self.register_transform = self._transform.register_transform self.unregister_transform = self._transform.unregister_transform self.max_inferable_values = 100 def visit_transforms(self, node): """Visit the transforms and apply them to the given *node*.""" return self._transform.visit(node) def ast_from_file(self, filepath, modname=None, fallback=True, source=False): """given a module name, return the astroid object""" try: filepath = modutils.get_source_file(filepath, include_no_ext=True) source = True except modutils.NoSourceFile: pass if modname is None: try: modname = '.'.join(modutils.modpath_from_file(filepath)) except ImportError: modname = filepath if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath: return self.astroid_cache[modname] if source: from astroid.builder import AstroidBuilder return AstroidBuilder(self).file_build(filepath, modname) if fallback and modname: return self.ast_from_module_name(modname) raise exceptions.AstroidBuildingError( 'Unable to build an AST for {path}.', path=filepath) def _build_stub_module(self, modname): from astroid.builder import AstroidBuilder return AstroidBuilder(self).string_build('', modname) def _build_namespace_module(self, modname, path): from astroid.builder import build_namespace_package_module return build_namespace_package_module(modname, path) def _can_load_extension(self, modname): if self.always_load_extensions: return True if modutils.is_standard_module(modname): return True parts = modname.split('.') return any( '.'.join(parts[:x]) in self.extension_package_whitelist for x in range(1, len(parts) + 1)) def ast_from_module_name(self, modname, context_file=None): """given a module name, return the astroid object""" if modname in self.astroid_cache: return self.astroid_cache[modname] if modname == '__main__': return self._build_stub_module(modname) old_cwd = os.getcwd() if context_file: os.chdir(os.path.dirname(context_file)) try: found_spec = self.file_from_module_name(modname, context_file) if found_spec.type == spec.ModuleType.PY_ZIPMODULE: module = self.zip_import_data(found_spec.location) if module is not None: return module 
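            # Otherwise dispatch on the spec type found for the module.
            # C extensions are only imported for real when they belong to the
            # standard library, are explicitly whitelisted, or
            # always_load_extensions is set; in every other case a stub module
            # is built so that arbitrary extension code does not run during
            # analysis.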
elif found_spec.type in (spec.ModuleType.C_BUILTIN, spec.ModuleType.C_EXTENSION): if (found_spec.type == spec.ModuleType.C_EXTENSION and not self._can_load_extension(modname)): return self._build_stub_module(modname) try: module = modutils.load_module_from_name(modname) except Exception as ex: raise exceptions.AstroidImportError( 'Loading {modname} failed with:\n{error}', modname=modname, path=found_spec.location) from ex return self.ast_from_module(module, modname) elif found_spec.type == spec.ModuleType.PY_COMPILED: raise exceptions.AstroidImportError( "Unable to load compiled module {modname}.", modname=modname, path=found_spec.location) elif found_spec.type == spec.ModuleType.PY_NAMESPACE: return self._build_namespace_module(modname, found_spec.submodule_search_locations) if found_spec.location is None: raise exceptions.AstroidImportError( "Can't find a file for module {modname}.", modname=modname) return self.ast_from_file(found_spec.location, modname, fallback=False) except exceptions.AstroidBuildingError as e: for hook in self._failed_import_hooks: try: return hook(modname) except exceptions.AstroidBuildingError: pass raise e finally: os.chdir(old_cwd) def zip_import_data(self, filepath): if zipimport is None: return None from astroid.builder import AstroidBuilder builder = AstroidBuilder(self) for ext in ('.zip', '.egg'): try: eggpath, resource = filepath.rsplit(ext + os.path.sep, 1) except ValueError: continue try: importer = zipimport.zipimporter(eggpath + ext) zmodname = resource.replace(os.path.sep, '.') if importer.is_package(resource): zmodname = zmodname + '.__init__' module = builder.string_build(importer.get_source(resource), zmodname, filepath) return module except Exception: # pylint: disable=broad-except continue return None def file_from_module_name(self, modname, contextfile): try: value = self._mod_file_cache[(modname, contextfile)] except KeyError: try: value = modutils.file_info_from_modpath( modname.split('.'), context_file=contextfile) except ImportError as ex: value = exceptions.AstroidImportError( 'Failed to import module {modname} with error:\n{error}.', modname=modname, error=ex) self._mod_file_cache[(modname, contextfile)] = value if isinstance(value, exceptions.AstroidBuildingError): raise value return value def ast_from_module(self, module, modname=None): """given an imported module, return the astroid object""" modname = modname or module.__name__ if modname in self.astroid_cache: return self.astroid_cache[modname] try: # some builtin modules don't have __file__ attribute filepath = module.__file__ if modutils.is_python_source(filepath): return self.ast_from_file(filepath, modname) except AttributeError: pass from astroid.builder import AstroidBuilder return AstroidBuilder(self).module_build(module, modname) def ast_from_class(self, klass, modname=None): """get astroid for the given class""" if modname is None: try: modname = klass.__module__ except AttributeError as exc: raise exceptions.AstroidBuildingError( 'Unable to get module for class {class_name}.', cls=klass, class_repr=safe_repr(klass), modname=modname) from exc modastroid = self.ast_from_module_name(modname) return modastroid.getattr(klass.__name__)[0] # XXX def infer_ast_from_something(self, obj, context=None): """infer astroid for the given class""" if hasattr(obj, '__class__') and not isinstance(obj, type): klass = obj.__class__ else: klass = obj try: modname = klass.__module__ except AttributeError as exc: raise exceptions.AstroidBuildingError( 'Unable to get module for {class_repr}.', 
cls=klass, class_repr=safe_repr(klass)) from exc except Exception as exc: raise exceptions.AstroidImportError( 'Unexpected error while retrieving module for {class_repr}:\n' '{error}', cls=klass, class_repr=safe_repr(klass)) from exc try: name = klass.__name__ except AttributeError as exc: raise exceptions.AstroidBuildingError( 'Unable to get name for {class_repr}:\n', cls=klass, class_repr=safe_repr(klass)) from exc except Exception as exc: raise exceptions.AstroidImportError( 'Unexpected error while retrieving name for {class_repr}:\n' '{error}', cls=klass, class_repr=safe_repr(klass)) from exc # take care, on living object __module__ is regularly wrong :( modastroid = self.ast_from_module_name(modname) if klass is obj: for inferred in modastroid.igetattr(name, context): yield inferred else: for inferred in modastroid.igetattr(name, context): yield inferred.instantiate_class() def register_failed_import_hook(self, hook): """Registers a hook to resolve imports that cannot be found otherwise. `hook` must be a function that accepts a single argument `modname` which contains the name of the module or package that could not be imported. If `hook` can resolve the import, must return a node of type `astroid.Module`, otherwise, it must raise `AstroidBuildingError`. """ self._failed_import_hooks.append(hook) def cache_module(self, module): """Cache a module if no module with the same name is known yet.""" self.astroid_cache.setdefault(module.name, module) def clear_cache(self, astroid_builtin=None): # XXX clear transforms self.astroid_cache.clear() # force bootstrap again, else we may ends up with cache inconsistency # between the manager and CONST_PROXY, making # unittest_lookup.LookupTC.test_builtin_lookup fail depending on the # test order import astroid.raw_building astroid.raw_building._astroid_bootstrapping( astroid_builtin=astroid_builtin) astroid-2.0.1/astroid/mixins.py0000644000076500000240000001251613324063433017264 0ustar claudiustaff00000000000000# Copyright (c) 2010-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014-2016, 2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2018 Nick Drozd # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """This module contains some mixins for the different nodes. 
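
Currently this covers block line-number ranges, statement filtering,
assignment types, import-related helpers, multi-line block traversal and
childless nodes.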
""" from astroid import decorators from astroid import exceptions class BlockRangeMixIn: """override block range """ @decorators.cachedproperty def blockstart_tolineno(self): return self.lineno def _elsed_block_range(self, lineno, orelse, last=None): """handle block line numbers range for try/finally, for, if and while statements """ if lineno == self.fromlineno: return lineno, lineno if orelse: if lineno >= orelse[0].fromlineno: return lineno, orelse[-1].tolineno return lineno, orelse[0].fromlineno - 1 return lineno, last or self.tolineno class FilterStmtsMixin: """Mixin for statement filtering and assignment type""" def _get_filtered_stmts(self, _, node, _stmts, mystmt): """method used in _filter_stmts to get statements and trigger break""" if self.statement() is mystmt: # original node's statement is the assignment, only keep # current node (gen exp, list comp) return [node], True return _stmts, False def assign_type(self): return self class AssignTypeMixin: def assign_type(self): return self def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt): """method used in filter_stmts""" if self is mystmt: return _stmts, True if self.statement() is mystmt: # original node's statement is the assignment, only keep # current node (gen exp, list comp) return [node], True return _stmts, False class ParentAssignTypeMixin(AssignTypeMixin): def assign_type(self): return self.parent.assign_type() class ImportFromMixin(FilterStmtsMixin): """MixIn for From and Import Nodes""" def _infer_name(self, frame, name): return name def do_import_module(self, modname=None): """return the ast for a module whose name is imported by """ # handle special case where we are on a package node importing a module # using the same name as the package, which may end in an infinite loop # on relative imports # XXX: no more needed ? mymodule = self.root() level = getattr(self, 'level', None) # Import as no level if modname is None: modname = self.modname # XXX we should investigate deeper if we really want to check # importing itself: modname and mymodule.name be relative or absolute if mymodule.relative_to_absolute_name(modname, level) == mymodule.name: # FIXME: we used to raise InferenceError here, but why ? return mymodule return mymodule.import_module(modname, level=level, relative_only=level and level >= 1) def real_name(self, asname): """get name from 'as' name""" for name, _asname in self.names: if name == '*': return asname if not _asname: name = name.split('.', 1)[0] _asname = name if asname == _asname: return name raise exceptions.AttributeInferenceError( 'Could not find original name for {attribute} in {target!r}', target=self, attribute=asname) class MultiLineBlockMixin: """Mixin for nodes with multi-line blocks, e.g. For and FunctionDef. Note that this does not apply to every node with a `body` field. For instance, an If node has a multi-line body, but the body of an IfExpr is not multi-line, and hence cannot contain Return nodes, Assign nodes, etc. 
""" @decorators.cachedproperty def _multi_line_blocks(self): return tuple( getattr(self, field) for field in self._multi_line_block_fields ) def _get_return_nodes_skip_functions(self): for block in self._multi_line_blocks: for child_node in block: if child_node.is_function: continue yield from child_node._get_return_nodes_skip_functions() def _get_yield_nodes_skip_lambdas(self): for block in self._multi_line_blocks: for child_node in block: if child_node.is_lambda: continue yield from child_node._get_yield_nodes_skip_lambdas() def _get_assign_nodes(self): for block in self._multi_line_blocks: for child_node in block: yield from child_node._get_assign_nodes() class NoChildrenMixin: """Mixin for nodes with no children, e.g. Pass.""" def get_children(self): yield from () astroid-2.0.1/astroid/modutils.py0000644000076500000240000005574013324063433017623 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. # Copyright (c) 2014 Denis Laxalde # Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2015 Radosław Ganczarek # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2016 Ceridwen # Copyright (c) 2018 Mario Corchero # Copyright (c) 2018 Mario Corchero # Copyright (c) 2018 Anthony Sottile # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Python modules manipulation utility functions. :type PY_SOURCE_EXTS: tuple(str) :var PY_SOURCE_EXTS: list of possible python source file extension :type STD_LIB_DIRS: set of str :var STD_LIB_DIRS: directories where standard modules are located :type BUILTIN_MODULES: dict :var BUILTIN_MODULES: dictionary with builtin module names has key """ import imp import os import platform import sys import itertools from distutils.sysconfig import get_python_lib # pylint: disable=import-error # pylint: disable=import-error, no-name-in-module from distutils.errors import DistutilsPlatformError # distutils is replaced by virtualenv with a module that does # weird path manipulations in order to get to the # real distutils module. from .interpreter._import import spec from .interpreter._import import util if sys.platform.startswith('win'): PY_SOURCE_EXTS = ('py', 'pyw') PY_COMPILED_EXTS = ('dll', 'pyd') else: PY_SOURCE_EXTS = ('py',) PY_COMPILED_EXTS = ('so',) try: # The explicit sys.prefix is to work around a patch in virtualenv that # replaces the 'real' sys.prefix (i.e. the location of the binary) # with the prefix from which the virtualenv was created. This throws # off the detection logic for standard library modules, thus the # workaround. STD_LIB_DIRS = { get_python_lib(standard_lib=True, prefix=sys.prefix), # Take care of installations where exec_prefix != prefix. get_python_lib(standard_lib=True, prefix=sys.exec_prefix), get_python_lib(standard_lib=True)} # get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to # non-valid path, see https://bugs.pypy.org/issue1164 except DistutilsPlatformError: STD_LIB_DIRS = set() if os.name == 'nt': STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls')) try: # real_prefix is defined when running inside virtual environments, # created with the **virtualenv** library. 
STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls')) except AttributeError: # sys.base_exec_prefix is always defined, but in a virtual environment # created with the stdlib **venv** module, it points to the original # installation, if the virtual env is activated. try: STD_LIB_DIRS.add(os.path.join(sys.base_exec_prefix, 'dlls')) except AttributeError: pass if platform.python_implementation() == 'PyPy': _root = os.path.join(sys.prefix, 'lib_pypy') STD_LIB_DIRS.add(_root) try: # real_prefix is defined when running inside virtualenv. STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'lib_pypy')) except AttributeError: pass del _root if os.name == 'posix': # Need the real prefix is we're under a virtualenv, otherwise # the usual one will do. try: prefix = sys.real_prefix except AttributeError: prefix = sys.prefix def _posix_path(path): base_python = 'python%d.%d' % sys.version_info[:2] return os.path.join(prefix, path, base_python) STD_LIB_DIRS.add(_posix_path('lib')) if sys.maxsize > 2**32: # This tries to fix a problem with /usr/lib64 builds, # where systems are running both 32-bit and 64-bit code # on the same machine, which reflects into the places where # standard library could be found. More details can be found # here http://bugs.python.org/issue1294959. # An easy reproducing case would be # https://github.com/PyCQA/pylint/issues/712#issuecomment-163178753 STD_LIB_DIRS.add(_posix_path('lib64')) EXT_LIB_DIR = get_python_lib() IS_JYTHON = platform.python_implementation() == 'Jython' BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True) class NoSourceFile(Exception): """exception raised when we are not able to get a python source file for a precompiled file """ def _normalize_path(path): return os.path.normcase(os.path.abspath(path)) def _canonicalize_path(path): return os.path.realpath(os.path.expanduser(path)) def _path_from_filename(filename, is_jython=IS_JYTHON): if not is_jython: if sys.version_info > (3, 0): return filename if filename.endswith(".pyc"): return filename[:-1] return filename head, has_pyclass, _ = filename.partition("$py.class") if has_pyclass: return head + ".py" return filename def _handle_blacklist(blacklist, dirnames, filenames): """remove files/directories in the black list dirnames/filenames are usually from os.walk """ for norecurs in blacklist: if norecurs in dirnames: dirnames.remove(norecurs) elif norecurs in filenames: filenames.remove(norecurs) _NORM_PATH_CACHE = {} def _cache_normalize_path(path): """abspath with caching""" # _module_file calls abspath on every path in sys.path every time it's # called; on a larger codebase this easily adds up to half a second just # assembling path components. This cache alleviates that. try: return _NORM_PATH_CACHE[path] except KeyError: if not path: # don't cache result for '' return _normalize_path(path) result = _NORM_PATH_CACHE[path] = _normalize_path(path) return result def load_module_from_name(dotted_name, path=None, use_sys=True): """Load a Python module from its name. 
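
    For example, ``load_module_from_name('xml.dom.minidom')`` behaves roughly
    like ``importlib.import_module('xml.dom.minidom')``, except that a module
    already present in ``sys.modules`` is returned directly when `use_sys`
    is true.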
:type dotted_name: str :param dotted_name: python name of a module or package :type path: list or None :param path: optional list of path where the module or package should be searched (use sys.path if nothing or None is given) :type use_sys: bool :param use_sys: boolean indicating whether the sys.modules dictionary should be used or not :raise ImportError: if the module or package is not found :rtype: module :return: the loaded module """ return load_module_from_modpath(dotted_name.split('.'), path, use_sys) def load_module_from_modpath(parts, path=None, use_sys=1): """Load a python module from its split name. :type parts: list(str) or tuple(str) :param parts: python name of a module or package split on '.' :type path: list or None :param path: optional list of path where the module or package should be searched (use sys.path if nothing or None is given) :type use_sys: bool :param use_sys: boolean indicating whether the sys.modules dictionary should be used or not :raise ImportError: if the module or package is not found :rtype: module :return: the loaded module """ if use_sys: try: return sys.modules['.'.join(parts)] except KeyError: pass modpath = [] prevmodule = None for part in parts: modpath.append(part) curname = '.'.join(modpath) module = None if len(modpath) != len(parts): # even with use_sys=False, should try to get outer packages from sys.modules module = sys.modules.get(curname) elif use_sys: # because it may have been indirectly loaded through a parent module = sys.modules.get(curname) if module is None: mp_file, mp_filename, mp_desc = imp.find_module(part, path) module = imp.load_module(curname, mp_file, mp_filename, mp_desc) # mp_file still needs to be closed. if mp_file: mp_file.close() if prevmodule: setattr(prevmodule, part, module) _file = getattr(module, '__file__', '') prevmodule = module if not _file and util.is_namespace(curname): continue if not _file and len(modpath) != len(parts): raise ImportError('no module in %s' % '.'.join(parts[len(modpath):])) path = [os.path.dirname(_file)] return module def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None): """Load a Python module from it's path. :type filepath: str :param filepath: path to the python module or package :type path: list or None :param path: optional list of path where the module or package should be searched (use sys.path if nothing or None is given) :type use_sys: bool :param use_sys: boolean indicating whether the sys.modules dictionary should be used or not :raise ImportError: if the module or package is not found :rtype: module :return: the loaded module """ modpath = modpath_from_file(filepath, extrapath) return load_module_from_modpath(modpath, path, use_sys) def check_modpath_has_init(path, mod_path): """check there are some __init__.py all along the way""" modpath = [] for part in mod_path: modpath.append(part) path = os.path.join(path, part) if not _has_init(path): old_namespace = util.is_namespace('.'.join(modpath)) if not old_namespace: return False return True def _get_relative_base_path(filename, path_to_check): """Extracts the relative mod path of the file to import from Check if a file is within the passed in path and if so, returns the relative mod path from the one passed in. 
If the filename is no in path_to_check, returns None Note this function will look for both abs and realpath of the file, this allows to find the relative base path even if the file is a symlink of a file in the passed in path Examples: _get_relative_base_path("/a/b/c/d.py", "/a/b") -> ["c","d"] _get_relative_base_path("/a/b/c/d.py", "/dev") -> None """ importable_path = None path_to_check = os.path.normcase(path_to_check) abs_filename = os.path.abspath(filename) if os.path.normcase(abs_filename).startswith(path_to_check): importable_path = abs_filename real_filename = os.path.realpath(filename) if os.path.normcase(real_filename).startswith(path_to_check): importable_path = real_filename if importable_path: base_path = os.path.splitext(importable_path)[0] relative_base_path = base_path[len(path_to_check):] return [pkg for pkg in relative_base_path.split(os.sep) if pkg] return None def modpath_from_file_with_callback(filename, extrapath=None, is_package_cb=None): filename = os.path.expanduser(_path_from_filename(filename)) if extrapath is not None: for path_ in itertools.chain(map(_canonicalize_path, extrapath), extrapath): path = os.path.abspath(path_) if not path: continue submodpath = _get_relative_base_path(filename, path) if not submodpath: continue if is_package_cb(path, submodpath[:-1]): return extrapath[path_].split('.') + submodpath for path in itertools.chain(map(_canonicalize_path, sys.path), sys.path): path = _cache_normalize_path(path) if not path: continue modpath = _get_relative_base_path(filename, path) if not modpath: continue if is_package_cb(path, modpath[:-1]): return modpath raise ImportError('Unable to find module for %s in %s' % ( filename, ', \n'.join(sys.path))) def modpath_from_file(filename, extrapath=None): """given a file path return the corresponding split module's name (i.e name of a module or package split on '.') :type filename: str :param filename: file's path for which we want the module's name :type extrapath: dict :param extrapath: optional extra search path, with path as key and package name for the path as value. This is usually useful to handle package split in multiple directories using __path__ trick. :raise ImportError: if the corresponding module's name has not been found :rtype: list(str) :return: the corresponding split module's name """ return modpath_from_file_with_callback(filename, extrapath, check_modpath_has_init) def file_from_modpath(modpath, path=None, context_file=None): return file_info_from_modpath(modpath, path, context_file).location def file_info_from_modpath(modpath, path=None, context_file=None): """given a mod path (i.e. split module / package name), return the corresponding file, giving priority to source file over precompiled file if it exists :type modpath: list or tuple :param modpath: split module's name (i.e name of a module or package split on '.') (this means explicit relative imports that start with dots have empty strings in this list!) :type path: list or None :param path: optional list of path where the module or package should be searched (use sys.path if nothing or None is given) :type context_file: str or None :param context_file: context file to consider, necessary if the identifier has been introduced using a relative import unresolvable in the actual context (i.e. 
modutils) :raise ImportError: if there is no such module in the directory :rtype: (str or None, import type) :return: the path to the module's file or None if it's an integrated builtin module such as 'sys' """ if context_file is not None: context = os.path.dirname(context_file) else: context = context_file if modpath[0] == 'xml': # handle _xmlplus try: return _spec_from_modpath(['_xmlplus'] + modpath[1:], path, context) except ImportError: return _spec_from_modpath(modpath, path, context) elif modpath == ['os', 'path']: # FIXME: currently ignoring search_path... return spec.ModuleSpec(name='os.path', location=os.path.__file__, module_type=imp.PY_SOURCE) return _spec_from_modpath(modpath, path, context) def get_module_part(dotted_name, context_file=None): """given a dotted name return the module part of the name : >>> get_module_part('astroid.as_string.dump') 'astroid.as_string' :type dotted_name: str :param dotted_name: full name of the identifier we are interested in :type context_file: str or None :param context_file: context file to consider, necessary if the identifier has been introduced using a relative import unresolvable in the actual context (i.e. modutils) :raise ImportError: if there is no such module in the directory :rtype: str or None :return: the module part of the name or None if we have not been able at all to import the given name XXX: deprecated, since it doesn't handle package precedence over module (see #10066) """ # os.path trick if dotted_name.startswith('os.path'): return 'os.path' parts = dotted_name.split('.') if context_file is not None: # first check for builtin module which won't be considered latter # in that case (path != None) if parts[0] in BUILTIN_MODULES: if len(parts) > 2: raise ImportError(dotted_name) return parts[0] # don't use += or insert, we want a new list to be created ! path = None starti = 0 if parts[0] == '': assert context_file is not None, \ 'explicit relative import, but no context_file?' path = [] # prevent resolving the import non-relatively starti = 1 while parts[starti] == '': # for all further dots: change context starti += 1 context_file = os.path.dirname(context_file) for i in range(starti, len(parts)): try: file_from_modpath(parts[starti:i+1], path=path, context_file=context_file) except ImportError: if i < max(1, len(parts) - 2): raise return '.'.join(parts[:i]) return dotted_name def get_module_files(src_directory, blacklist, list_all=False): """given a package directory return a list of all available python module's files in the package and its subpackages :type src_directory: str :param src_directory: path of the directory corresponding to the package :type blacklist: list or tuple :param blacklist: iterable list of files or directories to ignore. 
:type list_all: bool :param list_all: get files from all paths, including ones without __init__.py :rtype: list :return: the list of all available python module's files in the package and its subpackages """ files = [] for directory, dirnames, filenames in os.walk(src_directory): if directory in blacklist: continue _handle_blacklist(blacklist, dirnames, filenames) # check for __init__.py if not list_all and '__init__.py' not in filenames: dirnames[:] = () continue for filename in filenames: if _is_python_file(filename): src = os.path.join(directory, filename) files.append(src) return files def get_source_file(filename, include_no_ext=False): """given a python module's file name return the matching source file name (the filename will be returned identically if it's a already an absolute path to a python source file...) :type filename: str :param filename: python module's file name :raise NoSourceFile: if no source file exists on the file system :rtype: str :return: the absolute path of the source file if it exists """ filename = os.path.abspath(_path_from_filename(filename)) base, orig_ext = os.path.splitext(filename) for ext in PY_SOURCE_EXTS: source_path = '%s.%s' % (base, ext) if os.path.exists(source_path): return source_path if include_no_ext and not orig_ext and os.path.exists(base): return base raise NoSourceFile(filename) def is_python_source(filename): """ rtype: bool return: True if the filename is a python source file """ return os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS def is_standard_module(modname, std_path=None): """try to guess if a module is a standard python module (by default, see `std_path` parameter's description) :type modname: str :param modname: name of the module we are interested in :type std_path: list(str) or tuple(str) :param std_path: list of path considered has standard :rtype: bool :return: true if the module: - is located on the path listed in one of the directory in `std_path` - is a built-in module """ modname = modname.split('.')[0] try: filename = file_from_modpath([modname]) except ImportError: # import failed, i'm probably not so wrong by supposing it's # not standard... return False # modules which are not living in a file are considered standard # (sys and __builtin__ for instance) if filename is None: # we assume there are no namespaces in stdlib return not util.is_namespace(modname) filename = _normalize_path(filename) if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)): return False if std_path is None: std_path = STD_LIB_DIRS for path in std_path: if filename.startswith(_cache_normalize_path(path)): return True return False def is_relative(modname, from_file): """return true if the given module name is relative to the given file name :type modname: str :param modname: name of the module we are interested in :type from_file: str :param from_file: path of the module from which modname has been imported :rtype: bool :return: true if the module has been imported relatively to `from_file` """ if not os.path.isdir(from_file): from_file = os.path.dirname(from_file) if from_file in sys.path: return False try: stream, _, _ = imp.find_module(modname.split('.')[0], [from_file]) # Close the stream to avoid ResourceWarnings. if stream: stream.close() return True except ImportError: return False # internal only functions ##################################################### def _spec_from_modpath(modpath, path=None, context=None): """given a mod path (i.e. 
split module / package name), return the corresponding spec this function is used internally, see `file_from_modpath`'s documentation for more information """ assert modpath location = None if context is not None: try: found_spec = spec.find_spec(modpath, [context]) location = found_spec.location except ImportError: found_spec = spec.find_spec(modpath, path) location = found_spec.location else: found_spec = spec.find_spec(modpath, path) if found_spec.type == spec.ModuleType.PY_COMPILED: try: location = get_source_file(found_spec.location) return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE) except NoSourceFile: return found_spec._replace(location=location) elif found_spec.type == spec.ModuleType.C_BUILTIN: # integrated builtin module return found_spec._replace(location=None) elif found_spec.type == spec.ModuleType.PKG_DIRECTORY: location = _has_init(found_spec.location) return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE) return found_spec def _is_python_file(filename): """return true if the given filename should be considered as a python file .pyc and .pyo are ignored """ for ext in ('.py', '.so', '.pyd', '.pyw'): if filename.endswith(ext): return True return False def _has_init(directory): """if the given directory has a valid __init__ file, return its path, else return None """ mod_or_pack = os.path.join(directory, '__init__') for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'): if os.path.exists(mod_or_pack + '.' + ext): return mod_or_pack + '.' + ext return None def is_namespace(specobj): return specobj.type == spec.ModuleType.PY_NAMESPACE def is_directory(specobj): return specobj.type == spec.ModuleType.PKG_DIRECTORY astroid-2.0.1/astroid/node_classes.py0000644000076500000240000041222013324063433020413 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # pylint: disable=too-many-lines; https://github.com/PyCQA/astroid/issues/465 # Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2010 Daniel Harding # Copyright (c) 2012 FELD Boris # Copyright (c) 2013-2014 Google, Inc. # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2016-2017 Derek Gustafson # Copyright (c) 2016 Jared Garst # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2016 Dave Baum # Copyright (c) 2017-2018 Ashley Whetter # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017 rr- # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 brendanator # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 HoverHell # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Module for some node classes. More nodes in scoped_nodes.py """ import abc import builtins as builtins_mod import itertools import pprint from functools import lru_cache from functools import singledispatch as _singledispatch from astroid import as_string from astroid import bases from astroid import context as contextmod from astroid import decorators from astroid import exceptions from astroid import manager from astroid import mixins from astroid import util BUILTINS = builtins_mod.__name__ MANAGER = manager.AstroidManager() @decorators.raise_if_nothing_inferred def unpack_infer(stmt, context=None): """recursively generate nodes inferred by the given statement. 
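    For instance, a statement that infers to the tuple ``(ValueError,
    TypeError)`` yields the two exception classes one by one rather than the
    tuple node itself.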
If the inferred value is a list or a tuple, recurse on the elements """ if isinstance(stmt, (List, Tuple)): for elt in stmt.elts: if elt is util.Uninferable: yield elt continue yield from unpack_infer(elt, context) # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. return dict(node=stmt, context=context) # if inferred is a final node, return it and stop inferred = next(stmt.infer(context)) if inferred is stmt: yield inferred # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. return dict(node=stmt, context=context) # else, infer recursively, except Uninferable object that should be returned as is for inferred in stmt.infer(context): if inferred is util.Uninferable: yield inferred else: yield from unpack_infer(inferred, context) return dict(node=stmt, context=context) def are_exclusive(stmt1, stmt2, exceptions=None): # pylint: disable=redefined-outer-name """return true if the two given statements are mutually exclusive `exceptions` may be a list of exception names. If specified, discard If branches and check one of the statement is in an exception handler catching one of the given exceptions. algorithm : 1) index stmt1's parents 2) climb among stmt2's parents until we find a common parent 3) if the common parent is a If or TryExcept statement, look if nodes are in exclusive branches """ # index stmt1's parents stmt1_parents = {} children = {} node = stmt1.parent previous = stmt1 while node: stmt1_parents[node] = 1 children[node] = previous previous = node node = node.parent # climb among stmt2's parents until we find a common parent node = stmt2.parent previous = stmt2 while node: if node in stmt1_parents: # if the common parent is a If or TryExcept statement, look if # nodes are in exclusive branches if isinstance(node, If) and exceptions is None: if (node.locate_child(previous)[1] is not node.locate_child(children[node])[1]): return True elif isinstance(node, TryExcept): c2attr, c2node = node.locate_child(previous) c1attr, c1node = node.locate_child(children[node]) if c1node is not c2node: first_in_body_caught_by_handlers = ( c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) second_in_body_caught_by_handlers = ( c2attr == 'body' and c1attr == 'handlers' and children[node].catch(exceptions)) first_in_else_other_in_handlers = ( c2attr == 'handlers' and c1attr == 'orelse') second_in_else_other_in_handlers = ( c2attr == 'orelse' and c1attr == 'handlers') if any((first_in_body_caught_by_handlers, second_in_body_caught_by_handlers, first_in_else_other_in_handlers, second_in_else_other_in_handlers)): return True elif c2attr == 'handlers' and c1attr == 'handlers': return previous is not children[node] return False previous = node node = node.parent return False # getitem() helpers. _SLICE_SENTINEL = object() def _slice_value(index, context=None): """Get the value of the given slice index.""" if isinstance(index, Const): if isinstance(index.value, (int, type(None))): return index.value elif index is None: return None else: # Try to infer what the index actually is. # Since we can't return all the possible values, # we'll stop at the first possible value. try: inferred = next(index.infer(context=context)) except exceptions.InferenceError: pass else: if isinstance(inferred, Const): if isinstance(inferred.value, (int, type(None))): return inferred.value # Use a sentinel, because None can be a valid # value that this function can return, # as it is the case for unspecified bounds. 
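    # (e.g. for x[1:] the upper bound is None and that None is meaningful,
    # so None itself cannot double as the "could not infer" marker)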
return _SLICE_SENTINEL def _infer_slice(node, context=None): lower = _slice_value(node.lower, context) upper = _slice_value(node.upper, context) step = _slice_value(node.step, context) if all(elem is not _SLICE_SENTINEL for elem in (lower, upper, step)): return slice(lower, upper, step) raise exceptions.AstroidTypeError( message='Could not infer slice used in subscript', node=node, index=node.parent, context=context) def _container_getitem(instance, elts, index, context=None): """Get a slice or an item, using the given *index*, for the given sequence.""" try: if isinstance(index, Slice): index_slice = _infer_slice(index, context=context) new_cls = instance.__class__() new_cls.elts = elts[index_slice] new_cls.parent = instance.parent return new_cls if isinstance(index, Const): return elts[index.value] except IndexError as exc: raise exceptions.AstroidIndexError( message='Index {index!s} out of range', node=instance, index=index, context=context) from exc except TypeError as exc: raise exceptions.AstroidTypeError( message='Type error {error!r}', node=instance, index=index, context=context) from exc raise exceptions.AstroidTypeError('Could not use %s as subscript index' % index) OP_PRECEDENCE = { op: precedence for precedence, ops in enumerate([ ['Lambda'], # lambda x: x + 1 ['IfExp'], # 1 if True else 2 ['or'], ['and'], ['not'], ['Compare'], # in, not in, is, is not, <, <=, >, >=, !=, == ['|'], ['^'], ['&'], ['<<', '>>'], ['+', '-'], ['*', '@', '/', '//', '%'], ['UnaryOp'], # +, -, ~ ['**'], ['Await'], ]) for op in ops } class NodeNG: """ A node of the new Abstract Syntax Tree (AST). This is the base class for all Astroid node classes. """ is_statement = False """Whether this node indicates a statement. :type: bool """ optional_assign = False # True for For (and for Comprehension if py <3.0) """Whether this node optionally assigns a variable. This is for loop assignments because loop won't necessarily perform an assignment if the loop has no iterations. This is also the case from comprehensions in Python 2. :type: bool """ is_function = False # True for FunctionDef nodes """Whether this node indicates a function. :type: bool """ is_lambda = False # Attributes below are set by the builder module or by raw factories lineno = None """The line that this node appears on in the source code. :type: int or None """ col_offset = None """The column that this node appears on in the source code. :type: int or None """ parent = None """The parent node in the syntax tree. :type: NodeNG or None """ _astroid_fields = () """Node attributes that contain child nodes. This is redefined in most concrete classes. :type: tuple(str) """ _other_fields = () """Node attributes that do not contain child nodes. :type: tuple(str) """ _other_other_fields = () """Attributes that contain AST-dependent fields. :type: tuple(str) """ # instance specific inference function infer(node, context) _explicit_inference = None def __init__(self, lineno=None, col_offset=None, parent=None): """ :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.lineno = lineno self.col_offset = col_offset self.parent = parent def infer(self, context=None, **kwargs): """Get a generator of the inferred values. This is the main entry point to the inference system. .. 
seealso:: :ref:`inference` If the instance has some explicit inference function set, it will be called instead of the default interface. :returns: The inferred values. :rtype: iterable """ if context is not None: context = context.extra_context.get(self, context) if self._explicit_inference is not None: # explicit_inference is not bound, give it self explicitly try: # pylint: disable=not-callable return self._explicit_inference(self, context, **kwargs) except exceptions.UseInferenceDefault: pass if not context: return self._infer(context, **kwargs) key = (self, context.lookupname, context.callcontext, context.boundnode) if key in context.inferred: return iter(context.inferred[key]) gen = context.cache_generator( key, self._infer(context, **kwargs)) return util.limit_inference(gen, MANAGER.max_inferable_values) def _repr_name(self): """Get a name for nice representation. This is either :attr:`name`, :attr:`attrname`, or the empty string. :returns: The nice name. :rtype: str """ names = {'name', 'attrname'} if all(name not in self._astroid_fields for name in names): return getattr(self, 'name', getattr(self, 'attrname', '')) return '' def __str__(self): rname = self._repr_name() cname = type(self).__name__ if rname: string = '%(cname)s.%(rname)s(%(fields)s)' alignment = len(cname) + len(rname) + 2 else: string = '%(cname)s(%(fields)s)' alignment = len(cname) + 1 result = [] for field in self._other_fields + self._astroid_fields: value = getattr(self, field) width = 80 - len(field) - alignment lines = pprint.pformat(value, indent=2, width=width).splitlines(True) inner = [lines[0]] for line in lines[1:]: inner.append(' ' * alignment + line) result.append('%s=%s' % (field, ''.join(inner))) return string % {'cname': cname, 'rname': rname, 'fields': (',\n' + ' ' * alignment).join(result)} def __repr__(self): rname = self._repr_name() if rname: string = '<%(cname)s.%(rname)s l.%(lineno)s at 0x%(id)x>' else: string = '<%(cname)s l.%(lineno)s at 0x%(id)x>' return string % {'cname': type(self).__name__, 'rname': rname, 'lineno': self.fromlineno, 'id': id(self)} def accept(self, visitor): """Visit this node using the given visitor.""" func = getattr(visitor, "visit_" + self.__class__.__name__.lower()) return func(self) def get_children(self): """Get the child nodes below this node. :returns: The children. :rtype: iterable(NodeNG) """ for field in self._astroid_fields: attr = getattr(self, field) if attr is None: continue if isinstance(attr, (list, tuple)): yield from attr else: yield attr def last_child(self): """An optimized version of list(get_children())[-1] :returns: The last child, or None if no children exist. :rtype: NodeNG or None """ for field in self._astroid_fields[::-1]: attr = getattr(self, field) if not attr: # None or empty listy / tuple continue if isinstance(attr, (list, tuple)): return attr[-1] return attr return None def parent_of(self, node): """Check if this node is the parent of the given node. :param node: The node to check if it is the child. :type node: NodeNG :returns: True if this node is the parent of the given node, False otherwise. :rtype: bool """ parent = node.parent while parent is not None: if self is parent: return True parent = parent.parent return False def statement(self): """The first parent node, including self, marked as statement node. :returns: The first parent statement. :rtype: NodeNG """ if self.is_statement: return self return self.parent.statement() def frame(self): """The first parent frame node. 
A frame node is a :class:`Module`, :class:`FunctionDef`, or :class:`ClassDef`. :returns: The first parent frame node. :rtype: Module or FunctionDef or ClassDef """ return self.parent.frame() def scope(self): """The first parent node defining a new scope. :returns: The first parent scope node. :rtype: Module or FunctionDef or ClassDef or Lambda or GenExpr """ return self.parent.scope() def root(self): """Return the root node of the syntax tree. :returns: The root node. :rtype: Module """ if self.parent: return self.parent.root() return self def child_sequence(self, child): """Search for the sequence that contains this child. :param child: The child node to search sequences for. :type child: NodeNG :returns: The sequence containing the given child node. :rtype: iterable(NodeNG) :raises AstroidError: If no sequence could be found that contains the given child. """ for field in self._astroid_fields: node_or_sequence = getattr(self, field) if node_or_sequence is child: return [node_or_sequence] # /!\ compiler.ast Nodes have an __iter__ walking over child nodes if (isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence): return node_or_sequence msg = 'Could not find %s in %s\'s children' raise exceptions.AstroidError(msg % (repr(child), repr(self))) def locate_child(self, child): """Find the field of this node that contains the given child. :param child: The child node to search fields for. :type child: NodeNG :returns: A tuple of the name of the field that contains the child, and the sequence or node that contains the child node. :rtype: tuple(str, iterable(NodeNG) or NodeNG) :raises AstroidError: If no field could be found that contains the given child. """ for field in self._astroid_fields: node_or_sequence = getattr(self, field) # /!\ compiler.ast Nodes have an __iter__ walking over child nodes if child is node_or_sequence: return field, child if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence: return field, node_or_sequence msg = 'Could not find %s in %s\'s children' raise exceptions.AstroidError(msg % (repr(child), repr(self))) # FIXME : should we merge child_sequence and locate_child ? locate_child # is only used in are_exclusive, child_sequence one time in pylint. def next_sibling(self): """The next sibling statement node. :returns: The next sibling statement node. :rtype: NodeNG or None """ return self.parent.next_sibling() def previous_sibling(self): """The previous sibling statement. :returns: The previous sibling statement node. :rtype: NodeNG or None """ return self.parent.previous_sibling() def nearest(self, nodes): """Get the node closest to this one from the given list of nodes. :param nodes: The list of nodes to search. All of these nodes must belong to the same module as this one. The list should be sorted by the line number of the nodes, smallest first. :type nodes: iterable(NodeNG) :returns: The node closest to this one in the source code, or None if one could not be found. :rtype: NodeNG or None """ myroot = self.root() mylineno = self.fromlineno nearest = None, 0 for node in nodes: assert node.root() is myroot, \ 'nodes %s and %s are not from the same module' % (self, node) lineno = node.fromlineno if node.fromlineno > mylineno: break if lineno > nearest[1]: nearest = node, lineno # FIXME: raise an exception if nearest is None ? 
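        # e.g. with candidate nodes on lines 2, 5 and 9 and self on line 7,
        # the node on line 5 is returned; line 9 is past self, so the loop
        # breaks before considering it.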
return nearest[0] # these are lazy because they're relatively expensive to compute for every # single node, and they rarely get looked at @decorators.cachedproperty def fromlineno(self): """The first line that this node appears on in the source code. :type: int or None """ if self.lineno is None: return self._fixed_source_line() return self.lineno @decorators.cachedproperty def tolineno(self): """The last line that this node appears on in the source code. :type: int or None """ if not self._astroid_fields: # can't have children lastchild = None else: lastchild = self.last_child() if lastchild is None: return self.fromlineno return lastchild.tolineno def _fixed_source_line(self): """Attempt to find the line that this node appears on. We need this method since not all nodes have :attr:`lineno` set. :returns: The line number of this node, or None if this could not be determined. :rtype: int or None """ line = self.lineno _node = self try: while line is None: _node = next(_node.get_children()) line = _node.lineno except StopIteration: _node = self.parent while _node and line is None: line = _node.lineno _node = _node.parent return line def block_range(self, lineno): """Get a range from the given line number to where this node ends. :param lineno: The line number to start the range at. :type lineno: int :returns: The range of line numbers that this node belongs to, starting at the given line number. :rtype: tuple(int, int or None) """ return lineno, self.tolineno def set_local(self, name, stmt): """Define that the given name is declared in the given statement node. This definition is stored on the parent scope node. .. seealso:: :meth:`scope` :param name: The name that is being defined. :type name: str :param stmt: The statement that defines the given name. :type stmt: NodeNG """ self.parent.set_local(name, stmt) def nodes_of_class(self, klass, skip_klass=None): """Get the nodes (including this one or below) of the given type. :param klass: The type of node to search for. :type klass: builtins.type :param skip_klass: A type of node to ignore. This is useful to ignore subclasses of :attr:`klass`. :type skip_klass: builtins.type :returns: The node of the given type. :rtype: iterable(NodeNG) """ if isinstance(self, klass): yield self if skip_klass is None: for child_node in self.get_children(): yield from child_node.nodes_of_class(klass, skip_klass) return for child_node in self.get_children(): if isinstance(child_node, skip_klass): continue yield from child_node.nodes_of_class(klass, skip_klass) def _get_assign_nodes(self): yield from () def _get_name_nodes(self): for child_node in self.get_children(): yield from child_node._get_name_nodes() def _get_return_nodes_skip_functions(self): yield from () def _get_yield_nodes_skip_lambdas(self): yield from () def _infer_name(self, frame, name): # overridden for ImportFrom, Import, Global, TryExcept and Arguments pass def _infer(self, context=None): """we don't know how to resolve a statement by default""" # this method is overridden by most concrete classes raise exceptions.InferenceError('No inference function for {node!r}.', node=self, context=context) def inferred(self): """Get a list of the inferred values. .. seealso:: :ref:`inference` :returns: The inferred values. :rtype: list """ return list(self.infer()) def instantiate_class(self): """Instantiate a instance of the defined class. .. note:: On anything other than a :class:`ClassDef` this will return self. :returns: An instance of the defined class. 
:rtype: object """ return self def has_base(self, node): """Check if this node inherits from the given type. :param node: The node defining the base to look for. Usually this is a :class:`Name` node. :type node: NodeNG """ return False def callable(self): """Whether this node defines something that is callable. :returns: True if this defines something that is callable, False otherwise. :rtype: bool """ return False def eq(self, value): return False def as_string(self): """Get the source code that this node represents. :returns: The source code. :rtype: str """ return as_string.to_code(self) def repr_tree(self, ids=False, include_linenos=False, ast_state=False, indent=' ', max_depth=0, max_width=80): """Get a string representation of the AST from this node. :param ids: If true, includes the ids with the node type names. :type ids: bool :param include_linenos: If true, includes the line numbers and column offsets. :type include_linenos: bool :param ast_state: If true, includes information derived from the whole AST like local and global variables. :type ast_state: bool :param indent: A string to use to indent the output string. :type indent: str :param max_depth: If set to a positive integer, won't return nodes deeper than max_depth in the string. :type max_depth: int :param max_width: Attempt to format the output string to stay within this number of characters, but can exceed it under some circumstances. Only positive integer values are valid, the default is 80. :type max_width: int :returns: The string representation of the AST. :rtype: str """ # pylint: disable=too-many-statements @_singledispatch def _repr_tree(node, result, done, cur_indent='', depth=1): """Outputs a representation of a non-tuple/list, non-node that's contained within an AST, including strings. 
""" lines = pprint.pformat(node, width=max(max_width - len(cur_indent), 1)).splitlines(True) result.append(lines[0]) result.extend([cur_indent + line for line in lines[1:]]) return len(lines) != 1 # pylint: disable=unused-variable; doesn't understand singledispatch @_repr_tree.register(tuple) @_repr_tree.register(list) def _repr_seq(node, result, done, cur_indent='', depth=1): """Outputs a representation of a sequence that's contained within an AST.""" cur_indent += indent result.append('[') if not node: broken = False elif len(node) == 1: broken = _repr_tree(node[0], result, done, cur_indent, depth) elif len(node) == 2: broken = _repr_tree(node[0], result, done, cur_indent, depth) if not broken: result.append(', ') else: result.append(',\n') result.append(cur_indent) broken = (_repr_tree(node[1], result, done, cur_indent, depth) or broken) else: result.append('\n') result.append(cur_indent) for child in node[:-1]: _repr_tree(child, result, done, cur_indent, depth) result.append(',\n') result.append(cur_indent) _repr_tree(node[-1], result, done, cur_indent, depth) broken = True result.append(']') return broken # pylint: disable=unused-variable; doesn't understand singledispatch @_repr_tree.register(NodeNG) def _repr_node(node, result, done, cur_indent='', depth=1): """Outputs a strings representation of an astroid node.""" if node in done: result.append(indent + ' max_depth: result.append('...') return False depth += 1 cur_indent += indent if ids: result.append('%s<0x%x>(\n' % (type(node).__name__, id(node))) else: result.append('%s(' % type(node).__name__) fields = [] if include_linenos: fields.extend(('lineno', 'col_offset')) fields.extend(node._other_fields) fields.extend(node._astroid_fields) if ast_state: fields.extend(node._other_other_fields) if not fields: broken = False elif len(fields) == 1: result.append('%s=' % fields[0]) broken = _repr_tree(getattr(node, fields[0]), result, done, cur_indent, depth) else: result.append('\n') result.append(cur_indent) for field in fields[:-1]: result.append('%s=' % field) _repr_tree(getattr(node, field), result, done, cur_indent, depth) result.append(',\n') result.append(cur_indent) result.append('%s=' % fields[-1]) _repr_tree(getattr(node, fields[-1]), result, done, cur_indent, depth) broken = True result.append(')') return broken result = [] _repr_tree(self, result, set()) return ''.join(result) def bool_value(self): """Determine the boolean value of this node. The boolean value of a node can have three possible values: * False: For instance, empty data structures, False, empty strings, instances which return explicitly False from the __nonzero__ / __bool__ method. * True: Most of constructs are True by default: classes, functions, modules etc * Uninferable: The inference engine is uncertain of the node's value. :returns: The boolean value of this node. :rtype: bool or Uninferable """ return util.Uninferable def op_precedence(self): # Look up by class name or default to highest precedence return OP_PRECEDENCE.get( self.__class__.__name__, len(OP_PRECEDENCE)) def op_left_associative(self): # Everything is left associative except `**` and IfExp return True class Statement(NodeNG): """Statement node adding a few attributes""" is_statement = True """Whether this node indicates a statement. :type: bool """ def next_sibling(self): """The next sibling statement node. :returns: The next sibling statement node. 
:rtype: NodeNG or None """ stmts = self.parent.child_sequence(self) index = stmts.index(self) try: return stmts[index +1] except IndexError: pass def previous_sibling(self): """The previous sibling statement. :returns: The previous sibling statement node. :rtype: NodeNG or None """ stmts = self.parent.child_sequence(self) index = stmts.index(self) if index >= 1: return stmts[index -1] return None class _BaseContainer(mixins.ParentAssignTypeMixin, NodeNG, bases.Instance, metaclass=abc.ABCMeta): """Base class for Set, FrozenSet, Tuple and List.""" _astroid_fields = ('elts',) def __init__(self, lineno=None, col_offset=None, parent=None): """ :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.elts = [] """The elements in the node. :type: list(NodeNG) """ super(_BaseContainer, self).__init__(lineno, col_offset, parent) def postinit(self, elts): """Do some setup after initialisation. :param elts: The list of elements the that node contains. :type elts: list(NodeNG) """ self.elts = elts @classmethod def from_constants(cls, elts=None): """Create a node of this type from the given list of elements. :param elts: The list of elements that the node should contain. :type elts: list(NodeNG) :returns: A new node containing the given elements. :rtype: NodeNG """ node = cls() if elts is None: node.elts = [] else: node.elts = [const_factory(e) for e in elts] return node def itered(self): """An iterator over the elements this node contains. :returns: The contents of this node. :rtype: iterable(NodeNG) """ return self.elts def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. :rtype: bool or Uninferable """ return bool(self.elts) @abc.abstractmethod def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type. :rtype: str """ def get_children(self): yield from self.elts class LookupMixIn: """Mixin to look up a name in the right scope.""" @lru_cache(maxsize=None) def lookup(self, name): """Lookup where the given variable is assigned. The lookup starts from self's scope. If self is not a frame itself and the name is found in the inner frame locals, statements will be filtered to remove ignorable statements according to self's location. :param name: The name of the variable to find assignments for. :type name: str :returns: The scope node and the list of assignments associated to the given name according to the scope where it has been found (locals, globals or builtin). :rtype: tuple(str, list(NodeNG)) """ return self.scope().scope_lookup(self, name) def ilookup(self, name): """Lookup the inferred values of the given variable. :param name: The variable name to find values for. :type name: str :returns: The inferred values of the statements returned from :meth:`lookup`. :rtype: iterable """ frame, stmts = self.lookup(name) context = contextmod.InferenceContext() return bases._infer_stmts(stmts, context, frame) def _filter_stmts(self, stmts, frame, offset): """Filter the given list of statements to remove ignorable statements. If self is not a frame itself and the name is found in the inner frame locals, statements will be filtered to remove ignorable statements according to self's location. :param stmts: The statements to filter. 
:type stmts: list(NodeNG) :param frame: The frame that all of the given statements belong to. :type frame: NodeNG :param offset: The line offset to filter statements up to. :type offset: int :returns: The filtered statements. :rtype: list(NodeNG) """ # if offset == -1, my actual frame is not the inner frame but its parent # # class A(B): pass # # we need this to resolve B correctly if offset == -1: myframe = self.frame().parent.frame() else: myframe = self.frame() # If the frame of this node is the same as the statement # of this node, then the node is part of a class or # a function definition and the frame of this node should be the # the upper frame, not the frame of the definition. # For more information why this is important, # see Pylint issue #295. # For example, for 'b', the statement is the same # as the frame / scope: # # def test(b=1): # ... if self.statement() is myframe and myframe.parent: myframe = myframe.parent.frame() mystmt = self.statement() # line filtering if we are in the same frame # # take care node may be missing lineno information (this is the case for # nodes inserted for living objects) if myframe is frame and mystmt.fromlineno is not None: assert mystmt.fromlineno is not None, mystmt mylineno = mystmt.fromlineno + offset else: # disabling lineno filtering mylineno = 0 _stmts = [] _stmt_parents = [] for node in stmts: stmt = node.statement() # line filtering is on and we have reached our location, break if stmt.fromlineno > mylineno > 0: break # Ignore decorators with the same name as the # decorated function # Fixes issue #375 if mystmt is stmt and is_from_decorator(self): continue assert hasattr(node, 'assign_type'), (node, node.scope(), node.scope().locals) assign_type = node.assign_type() if node.has_base(self): break _stmts, done = assign_type._get_filtered_stmts(self, node, _stmts, mystmt) if done: break optional_assign = assign_type.optional_assign if optional_assign and assign_type.parent_of(self): # we are inside a loop, loop var assignment is hiding previous # assignment _stmts = [node] _stmt_parents = [stmt.parent] continue # XXX comment various branches below!!! try: pindex = _stmt_parents.index(stmt.parent) except ValueError: pass else: # we got a parent index, this means the currently visited node # is at the same block level as a previously visited node if _stmts[pindex].assign_type().parent_of(assign_type): # both statements are not at the same block level continue # if currently visited node is following previously considered # assignment and both are not exclusive, we can drop the # previous one. 
For instance in the following code :: # # if a: # x = 1 # else: # x = 2 # print x # # we can't remove neither x = 1 nor x = 2 when looking for 'x' # of 'print x'; while in the following :: # # x = 1 # x = 2 # print x # # we can remove x = 1 when we see x = 2 # # moreover, on loop assignment types, assignment won't # necessarily be done if the loop has no iteration, so we don't # want to clear previous assignments if any (hence the test on # optional_assign) if not (optional_assign or are_exclusive(_stmts[pindex], node)): del _stmt_parents[pindex] del _stmts[pindex] if isinstance(node, AssignName): if not optional_assign and stmt.parent is mystmt.parent: _stmts = [] _stmt_parents = [] elif isinstance(node, DelName): _stmts = [] _stmt_parents = [] continue if not are_exclusive(self, node): _stmts.append(node) _stmt_parents.append(stmt.parent) return _stmts # Name classes class AssignName(mixins.NoChildrenMixin, LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG): """Variation of :class:`ast.Assign` representing assignment to a name. An :class:`AssignName` is the name of something that is assigned to. This includes variables defined in a function signature or in a loop. >>> node = astroid.extract_node('variable = range(10)') >>> node >>> list(node.get_children()) [, ] >>> list(node.get_children())[0].as_string() 'variable' """ _other_fields = ('name',) def __init__(self, name=None, lineno=None, col_offset=None, parent=None): """ :param name: The name that is assigned to. :type name: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.name = name """The name that is assigned to. :type: str or None """ super(AssignName, self).__init__(lineno, col_offset, parent) class DelName(mixins.NoChildrenMixin, LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG): """Variation of :class:`ast.Delete` represention deletion of a name. A :class:`DelName` is the name of something that is deleted. >>> node = astroid.extract_node("del variable #@") >>> list(node.get_children()) [] >>> list(node.get_children())[0].as_string() 'variable' """ _other_fields = ('name',) def __init__(self, name=None, lineno=None, col_offset=None, parent=None): """ :param name: The name that is being deleted. :type name: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.name = name """The name that is being deleted. :type: str or None """ super(DelName, self).__init__(lineno, col_offset, parent) class Name(mixins.NoChildrenMixin, LookupMixIn, NodeNG): """Class representing an :class:`ast.Name` node. A :class:`Name` node is something that is named, but not covered by :class:`AssignName` or :class:`DelName`. >>> node = astroid.extract_node('range(10)') >>> node >>> list(node.get_children()) [, ] >>> list(node.get_children())[0].as_string() 'range' """ _other_fields = ('name',) def __init__(self, name=None, lineno=None, col_offset=None, parent=None): """ :param name: The name that this node refers to. :type name: str or None :param lineno: The line that this node appears on in the source code. 
:type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.name = name """The name that this node refers to. :type: str or None """ super(Name, self).__init__(lineno, col_offset, parent) def _get_name_nodes(self): yield self for child_node in self.get_children(): yield from child_node._get_name_nodes() class Arguments(mixins.AssignTypeMixin, NodeNG): """Class representing an :class:`ast.arguments` node. An :class:`Arguments` node represents that arguments in a function definition. >>> node = astroid.extract_node('def foo(bar): pass') >>> node >>> node.args """ # Python 3.4+ uses a different approach regarding annotations, # each argument is a new class, _ast.arg, which exposes an # 'annotation' attribute. In astroid though, arguments are exposed # as is in the Arguments node and the only way to expose annotations # is by using something similar with Python 3.3: # - we expose 'varargannotation' and 'kwargannotation' of annotations # of varargs and kwargs. # - we expose 'annotation', a list with annotations for # for each normal argument. If an argument doesn't have an # annotation, its value will be None. _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults', 'annotations', 'varargannotation', 'kwargannotation', 'kwonlyargs_annotations') varargannotation = None """The type annotation for the variable length arguments. :type: NodeNG """ kwargannotation = None """The type annotation for the variable length keyword arguments. :type: NodeNG """ _other_fields = ('vararg', 'kwarg') def __init__(self, vararg=None, kwarg=None, parent=None): """ :param vararg: The name of the variable length arguments. :type vararg: str or None :param kwarg: The name of the variable length keyword arguments. :type kwarg: str or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ super(Arguments, self).__init__(parent=parent) self.vararg = vararg """The name of the variable length arguments. :type: str or None """ self.kwarg = kwarg """The name of the variable length keyword arguments. :type: str or None """ self.args = [] """The names of the required arguments. :type: list(AssignName) """ self.defaults = [] """The default values for arguments that can be passed positionally. :type: list(NodeNG) """ self.kwonlyargs = [] """The keyword arguments that cannot be passed positionally. :type: list(AssignName) """ self.kw_defaults = [] """The default values for keyword arguments that cannot be passed positionally. :type: list(NodeNG) """ self.annotations = [] """The type annotations of arguments that can be passed positionally. :type: list(NodeNG) """ self.kwonlyargs_annotations = [] """The type annotations of arguments that cannot be passed positionally. :type: list(NodeNG) """ def postinit(self, args, defaults, kwonlyargs, kw_defaults, annotations, kwonlyargs_annotations=None, varargannotation=None, kwargannotation=None): """Do some setup after initialisation. :param args: The names of the required arguments. :type args: list(AssignName) :param defaults: The default values for arguments that can be passed positionally. :type defaults: list(NodeNG) :param kwonlyargs: The keyword arguments that cannot be passed positionally. :type kwonlyargs: list(AssignName) :param kw_defaults: The default values for keyword arguments that cannot be passed positionally. 
:type kw_defaults: list(NodeNG) :param annotations: The type annotations of arguments that can be passed positionally. :type annotations: list(NodeNG) :param kwonlyargs_annotations: The type annotations of arguments that cannot be passed positionally. This should always be passed in Python 3. :type kwonlyargs_annotations: list(NodeNG) :param varargannotation: The type annotation for the variable length arguments. :type varargannotation: NodeNG :param kwargannotation: The type annotation for the variable length keyword arguments. :type kwargannotation: NodeNG """ self.args = args self.defaults = defaults self.kwonlyargs = kwonlyargs self.kw_defaults = kw_defaults self.annotations = annotations self.kwonlyargs_annotations = kwonlyargs_annotations self.varargannotation = varargannotation self.kwargannotation = kwargannotation def _infer_name(self, frame, name): if self.parent is frame: return name return None @decorators.cachedproperty def fromlineno(self): """The first line that this node appears on in the source code. :type: int or None """ lineno = super(Arguments, self).fromlineno return max(lineno, self.parent.fromlineno or 0) def format_args(self): """Get the arguments formatted as string. :returns: The formatted arguments. :rtype: str """ result = [] if self.args: result.append( _format_args(self.args, self.defaults, getattr(self, 'annotations', None)) ) if self.vararg: result.append('*%s' % self.vararg) if self.kwonlyargs: if not self.vararg: result.append('*') result.append(_format_args( self.kwonlyargs, self.kw_defaults, self.kwonlyargs_annotations )) if self.kwarg: result.append('**%s' % self.kwarg) return ', '.join(result) def default_value(self, argname): """Get the default value for an argument. :param argname: The name of the argument to get the default value for. :type argname: str :raises NoDefault: If there is no default value defined for the given argument. """ i = _find_arg(argname, self.args)[0] if i is not None: idx = i - (len(self.args) - len(self.defaults)) if idx >= 0: return self.defaults[idx] i = _find_arg(argname, self.kwonlyargs)[0] if i is not None and self.kw_defaults[i] is not None: return self.kw_defaults[i] raise exceptions.NoDefault(func=self.parent, name=argname) def is_argument(self, name): """Check if the given name is defined in the arguments. :param name: The name to check for. :type name: str :returns: True if the given name is defined in the arguments, False otherwise. :rtype: bool """ if name == self.vararg: return True if name == self.kwarg: return True return (self.find_argname(name, True)[1] is not None or self.kwonlyargs and _find_arg(name, self.kwonlyargs, True)[1] is not None) def find_argname(self, argname, rec=False): """Get the index and :class:`AssignName` node for given name. :param argname: The name of the argument to search for. :type argname: str :param rec: Whether or not to include arguments in unpacked tuples in the search. :type rec: bool :returns: The index and node for the argument. 
:rtype: tuple(str or None, AssignName or None) """ if self.args: # self.args may be None in some cases (builtin function) return _find_arg(argname, self.args, rec) return None, None def get_children(self): yield from self.args or () yield from self.defaults yield from self.kwonlyargs for elt in self.kw_defaults: if elt is not None: yield elt for elt in self.annotations: if elt is not None: yield elt if self.varargannotation is not None: yield self.varargannotation if self.kwargannotation is not None: yield self.kwargannotation for elt in self.kwonlyargs_annotations: if elt is not None: yield elt def _find_arg(argname, args, rec=False): for i, arg in enumerate(args): if isinstance(arg, Tuple): if rec: found = _find_arg(argname, arg.elts) if found[0] is not None: return found elif arg.name == argname: return i, arg return None, None def _format_args(args, defaults=None, annotations=None): values = [] if args is None: return '' if annotations is None: annotations = [] if defaults is not None: default_offset = len(args) - len(defaults) packed = itertools.zip_longest(args, annotations) for i, (arg, annotation) in enumerate(packed): if isinstance(arg, Tuple): values.append('(%s)' % _format_args(arg.elts)) else: argname = arg.name if annotation is not None: argname += ':' + annotation.as_string() values.append(argname) if defaults is not None and i >= default_offset: if defaults[i-default_offset] is not None: values[-1] += '=' + defaults[i-default_offset].as_string() return ', '.join(values) class AssignAttr(mixins.ParentAssignTypeMixin, NodeNG): """Variation of :class:`ast.Assign` representing assignment to an attribute. >>> node = astroid.extract_node('self.attribute = range(10)') >>> node >>> list(node.get_children()) [, ] >>> list(node.get_children())[0].as_string() 'self.attribute' """ _astroid_fields = ('expr',) _other_fields = ('attrname',) expr = None """What has the attribute that is being assigned to. :type: NodeNG or None """ def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None): """ :param attrname: The name of the attribute being assigned to. :type attrname: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.attrname = attrname """The name of the attribute being assigned to. :type: str or None """ super(AssignAttr, self).__init__(lineno, col_offset, parent) def postinit(self, expr=None): """Do some setup after initialisation. :param expr: What has the attribute that is being assigned to. :type expr: NodeNG or None """ self.expr = expr def get_children(self): yield self.expr class Assert(Statement): """Class representing an :class:`ast.Assert` node. An :class:`Assert` node represents an assert statement. >>> node = astroid.extract_node('assert len(things) == 10, "Not enough things"') >>> node """ _astroid_fields = ('test', 'fail',) test = None """The test that passes or fails the assertion. :type: NodeNG or None """ fail = None """The message shown when the assertion fails. :type: NodeNG or None """ def postinit(self, test=None, fail=None): """Do some setup after initialisation. :param test: The test that passes or fails the assertion. :type test: NodeNG or None :param fail: The message shown when the assertion fails. 
:type fail: NodeNG or None """ self.fail = fail self.test = test def get_children(self): yield self.test if self.fail is not None: yield self.fail class Assign(mixins.AssignTypeMixin, Statement): """Class representing an :class:`ast.Assign` node. An :class:`Assign` is a statement where something is explicitly asssigned to. >>> node = astroid.extract_node('variable = range(10)') >>> node """ _astroid_fields = ('targets', 'value',) _other_other_fields = ('type_annotation',) targets = None """What is being assigned to. :type: list(NodeNG) or None """ value = None """The value being assigned to the variables. :type: NodeNG or None """ type_annotation = None """If present, this will contain the type annotation passed by a type comment :type: NodeNG or None """ def postinit(self, targets=None, value=None, type_annotation=None): """Do some setup after initialisation. :param targets: What is being assigned to. :type targets: list(NodeNG) or None :param value: The value being assigned to the variables. :type: NodeNG or None """ self.targets = targets self.value = value self.type_annotation = type_annotation def get_children(self): yield from self.targets yield self.value def _get_assign_nodes(self): yield self yield from self.value._get_assign_nodes() class AnnAssign(mixins.AssignTypeMixin, Statement): """Class representing an :class:`ast.AnnAssign` node. An :class:`AnnAssign` is an assignment with a type annotation. >>> node = astroid.extract_node('variable: List[int] = range(10)') >>> node """ _astroid_fields = ('target', 'annotation', 'value',) _other_fields = ('simple',) target = None """What is being assigned to. :type: NodeNG or None """ annotation = None """The type annotation of what is being assigned to. :type: NodeNG """ value = None """The value being assigned to the variables. :type: NodeNG or None """ simple = None """Whether :attr:`target` is a pure name or a complex statement. :type: int """ def postinit(self, target, annotation, simple, value=None): """Do some setup after initialisation. :param target: What is being assigned to. :type target: NodeNG :param annotation: The type annotation of what is being assigned to. :type: NodeNG :param simple: Whether :attr:`target` is a pure name or a complex statement. :type simple: int :param value: The value being assigned to the variables. :type: NodeNG or None """ self.target = target self.annotation = annotation self.value = value self.simple = simple def get_children(self): yield self.target yield self.annotation if self.value is not None: yield self.value class AugAssign(mixins.AssignTypeMixin, Statement): """Class representing an :class:`ast.AugAssign` node. An :class:`AugAssign` is an assignment paired with an operator. >>> node = astroid.extract_node('variable += 1') >>> node """ _astroid_fields = ('target', 'value') _other_fields = ('op',) target = None """What is being assigned to. :type: NodeNG or None """ value = None """The value being assigned to the variable. :type: NodeNG or None """ def __init__(self, op=None, lineno=None, col_offset=None, parent=None): """ :param op: The operator that is being combined with the assignment. This includes the equals sign. :type op: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. 
:type parent: NodeNG or None """ self.op = op """The operator that is being combined with the assignment. This includes the equals sign. :type: str or None """ super(AugAssign, self).__init__(lineno, col_offset, parent) def postinit(self, target=None, value=None): """Do some setup after initialisation. :param target: What is being assigned to. :type target: NodeNG or None :param value: The value being assigned to the variable. :type: NodeNG or None """ self.target = target self.value = value # This is set by inference.py def _infer_augassign(self, context=None): raise NotImplementedError def type_errors(self, context=None): """Get a list of type errors which can occur during inference. Each TypeError is represented by a :class:`BadBinaryOperationMessage` , which holds the original exception. :returns: The list of possible type errors. :rtype: list(BadBinaryOperationMessage) """ try: results = self._infer_augassign(context=context) return [result for result in results if isinstance(result, util.BadBinaryOperationMessage)] except exceptions.InferenceError: return [] def get_children(self): yield self.target yield self.value class Repr(NodeNG): """Class representing an :class:`ast.Repr` node. A :class:`Repr` node represents the backtick syntax, which is a deprecated alias for :func:`repr` removed in Python 3. >>> node = astroid.extract_node('`variable`') >>> node """ _astroid_fields = ('value',) value = None """What is having :func:`repr` called on it. :type: NodeNG or None """ def postinit(self, value=None): """Do some setup after initialisation. :param value: What is having :func:`repr` called on it. :type value: NodeNG or None """ self.value = value class BinOp(NodeNG): """Class representing an :class:`ast.BinOp` node. A :class:`BinOp` node is an application of a binary operator. >>> node = astroid.extract_node('a + b') >>> node """ _astroid_fields = ('left', 'right') _other_fields = ('op',) left = None """What is being applied to the operator on the left side. :type: NodeNG or None """ right = None """What is being applied to the operator on the right side. :type: NodeNG or None """ def __init__(self, op=None, lineno=None, col_offset=None, parent=None): """ :param op: The operator. :type: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.op = op """The operator. :type: str or None """ super(BinOp, self).__init__(lineno, col_offset, parent) def postinit(self, left=None, right=None): """Do some setup after initialisation. :param left: What is being applied to the operator on the left side. :type left: NodeNG or None :param right: What is being applied to the operator on the right side. :type right: NodeNG or None """ self.left = left self.right = right # This is set by inference.py def _infer_binop(self, context=None): raise NotImplementedError def type_errors(self, context=None): """Get a list of type errors which can occur during inference. Each TypeError is represented by a :class:`BadBinaryOperationMessage`, which holds the original exception. :returns: The list of possible type errors. 
:rtype: list(BadBinaryOperationMessage) """ try: results = self._infer_binop(context=context) return [result for result in results if isinstance(result, util.BadBinaryOperationMessage)] except exceptions.InferenceError: return [] def get_children(self): yield self.left yield self.right def op_precedence(self): return OP_PRECEDENCE[self.op] def op_left_associative(self): # 2**3**4 == 2**(3**4) return self.op != '**' class BoolOp(NodeNG): """Class representing an :class:`ast.BoolOp` node. A :class:`BoolOp` is an application of a boolean operator. >>> node = astroid.extract_node('a and b') >>> node """ _astroid_fields = ('values',) _other_fields = ('op',) values = None """The values being applied to the operator. :type: list(NodeNG) or None """ def __init__(self, op=None, lineno=None, col_offset=None, parent=None): """ :param op: The operator. :type: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.op = op """The operator. :type: str or None """ super(BoolOp, self).__init__(lineno, col_offset, parent) def postinit(self, values=None): """Do some setup after initialisation. :param values: The values being applied to the operator. :type values: list(NodeNG) or None """ self.values = values def get_children(self): yield from self.values def op_precedence(self): return OP_PRECEDENCE[self.op] class Break(mixins.NoChildrenMixin, Statement): """Class representing an :class:`ast.Break` node. >>> node = astroid.extract_node('break') >>> node """ class Call(NodeNG): """Class representing an :class:`ast.Call` node. A :class:`Call` node is a call to a function, method, etc. >>> node = astroid.extract_node('function()') >>> node """ _astroid_fields = ('func', 'args', 'keywords') func = None """What is being called. :type: NodeNG or None """ args = None """The positional arguments being given to the call. :type: list(NodeNG) or None """ keywords = None """The keyword arguments being given to the call. :type: list(NodeNG) or None """ def postinit(self, func=None, args=None, keywords=None): """Do some setup after initialisation. :param func: What is being called. :type func: NodeNG or None :param args: The positional arguments being given to the call. :type args: list(NodeNG) or None :param keywords: The keyword arguments being given to the call. :type keywords: list(NodeNG) or None """ self.func = func self.args = args self.keywords = keywords @property def starargs(self): """The positional arguments that unpack something. :type: list(Starred) """ args = self.args or [] return [arg for arg in args if isinstance(arg, Starred)] @property def kwargs(self): """The keyword arguments that unpack something. :type: list(Keyword) """ keywords = self.keywords or [] return [keyword for keyword in keywords if keyword.arg is None] def get_children(self): yield self.func yield from self.args yield from self.keywords or () class Compare(NodeNG): """Class representing an :class:`ast.Compare` node. A :class:`Compare` node indicates a comparison. >>> node = astroid.extract_node('a <= b <= c') >>> node >>> node.ops [('<=', ), ('<=', )] """ _astroid_fields = ('left', 'ops',) left = None """The value at the left being applied to a comparison operator. :type: NodeNG or None """ ops = None """The remainder of the operators and their relevant right hand value. 
:type: list(tuple(str, NodeNG)) or None """ def postinit(self, left=None, ops=None): """Do some setup after initialisation. :param left: The value at the left being applied to a comparison operator. :type left: NodeNG or None :param ops: The remainder of the operators and their relevant right hand value. :type ops: list(tuple(str, NodeNG)) or None """ self.left = left self.ops = ops def get_children(self): """Get the child nodes below this node. Overridden to handle the tuple fields and skip returning the operator strings. :returns: The children. :rtype: iterable(NodeNG) """ yield self.left for _, comparator in self.ops: yield comparator # we don't want the 'op' def last_child(self): """An optimized version of list(get_children())[-1] :returns: The last child. :rtype: NodeNG """ # XXX maybe if self.ops: return self.ops[-1][1] #return self.left class Comprehension(NodeNG): """Class representing an :class:`ast.comprehension` node. A :class:`Comprehension` indicates the loop inside any type of comprehension including generator expressions. >>> node = astroid.extract_node('[x for x in some_values]') >>> list(node.get_children()) [, ] >>> list(node.get_children())[1].as_string() 'for x in some_values' """ _astroid_fields = ('target', 'iter', 'ifs') _other_fields = ('is_async',) target = None """What is assigned to by the comprehension. :type: NodeNG or None """ iter = None """What is iterated over by the comprehension. :type: NodeNG or None """ ifs = None """The contents of any if statements that filter the comprehension. :type: list(NodeNG) or None """ is_async = None """Whether this is an asynchronous comprehension or not. :type: bool or None """ def __init__(self, parent=None): """ :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ super(Comprehension, self).__init__() self.parent = parent # pylint: disable=redefined-builtin; same name as builtin ast module. def postinit(self, target=None, iter=None, ifs=None, is_async=None): """Do some setup after initialisation. :param target: What is assigned to by the comprehension. :type target: NodeNG or None :param iter: What is iterated over by the comprehension. :type iter: NodeNG or None :param ifs: The contents of any if statements that filter the comprehension. :type ifs: list(NodeNG) or None :param is_async: Whether this is an asynchronous comprehension or not. :type: bool or None """ self.target = target self.iter = iter self.ifs = ifs self.is_async = is_async optional_assign = True """Whether this node optionally assigns a variable. :type: bool """ def assign_type(self): """The type of assignment that this node performs. :returns: The assignment type. :rtype: NodeNG """ return self def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt): """method used in filter_stmts""" if self is mystmt: if isinstance(lookup_node, (Const, Name)): return [lookup_node], True elif self.statement() is mystmt: # original node's statement is the assignment, only keeps # current node (gen exp, list comp) return [node], True return stmts, False def get_children(self): yield self.target yield self.iter yield from self.ifs class Const(mixins.NoChildrenMixin, NodeNG, bases.Instance): """Class representing any constant including num, str, bool, None, bytes. 
>>> node = astroid.extract_node('(5, "This is a string.", True, None, b"bytes")') >>> node >>> list(node.get_children()) [, , , , ] """ _other_fields = ('value',) def __init__(self, value, lineno=None, col_offset=None, parent=None): """ :param value: The value that the constant represents. :type value: object :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.value = value """The value that the constant represents. :type: object """ super(Const, self).__init__(lineno, col_offset, parent) def __getattr__(self, name): # This is needed because of Proxy's __getattr__ method. # Calling object.__new__ on this class without calling # __init__ would result in an infinite loop otherwise # since __getattr__ is called when an attribute doesn't # exist and self._proxied indirectly calls self.value # and Proxy __getattr__ calls self.value if name == "value": raise AttributeError return super().__getattr__(name) def getitem(self, index, context=None): """Get an item from this node if subscriptable. :param index: The node to use as a subscript index. :type index: Const or Slice :raises AstroidTypeError: When the given index cannot be used as a subscript index, or if this node is not subscriptable. """ if isinstance(index, Const): index_value = index.value elif isinstance(index, Slice): index_value = _infer_slice(index, context=context) else: raise exceptions.AstroidTypeError( 'Could not use type {} as subscript index'.format(type(index)) ) try: if isinstance(self.value, (str, bytes)): return Const(self.value[index_value]) except IndexError as exc: raise exceptions.AstroidIndexError( message='Index {index!r} out of range', node=self, index=index, context=context) from exc except TypeError as exc: raise exceptions.AstroidTypeError( message='Type error {error!r}', node=self, index=index, context=context) from exc raise exceptions.AstroidTypeError( '%r (value=%s)' % (self, self.value) ) def has_dynamic_getattr(self): """Check if the node has a custom __getattr__ or __getattribute__. :returns: True if the class has a custom __getattr__ or __getattribute__, False otherwise. For a :class:`Const` this is always ``False``. :rtype: bool """ return False def itered(self): """An iterator over the elements this node contains. :returns: The contents of this node. :rtype: iterable(str) :raises TypeError: If this node does not represent something that is iterable. """ if isinstance(self.value, str): return self.value raise TypeError() def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type. :rtype: str """ return self._proxied.qname() def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. :rtype: bool """ return bool(self.value) class Continue(mixins.NoChildrenMixin, Statement): """Class representing an :class:`ast.Continue` node. >>> node = astroid.extract_node('continue') >>> node """ class Decorators(NodeNG): """A node representing a list of decorators. A :class:`Decorators` is the decorators that are applied to a method or function. >>> node = astroid.extract_node(''' @property def my_property(self): return 3 ''') >>> node >>> list(node.get_children())[0] """ _astroid_fields = ('nodes',) nodes = None """The decorators that this node contains. 
:type: list(Name or Call) or None """ def postinit(self, nodes): """Do some setup after initialisation. :param nodes: The decorators that this node contains. :type nodes: list(Name or Call) """ self.nodes = nodes def scope(self): """The first parent node defining a new scope. :returns: The first parent scope node. :rtype: Module or FunctionDef or ClassDef or Lambda or GenExpr """ # skip the function node to go directly to the upper level scope return self.parent.parent.scope() def get_children(self): yield from self.nodes class DelAttr(mixins.ParentAssignTypeMixin, NodeNG): """Variation of :class:`ast.Delete` representing deletion of an attribute. >>> node = astroid.extract_node('del self.attr') >>> node >>> list(node.get_children())[0] """ _astroid_fields = ('expr',) _other_fields = ('attrname',) expr = None """The name that this node represents. :type: Name or None """ def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None): """ :param attrname: The name of the attribute that is being deleted. :type attrname: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.attrname = attrname """The name of the attribute that is being deleted. :type: str or None """ super(DelAttr, self).__init__(lineno, col_offset, parent) def postinit(self, expr=None): """Do some setup after initialisation. :param expr: The name that this node represents. :type expr: Name or None """ self.expr = expr def get_children(self): yield self.expr class Delete(mixins.AssignTypeMixin, Statement): """Class representing an :class:`ast.Delete` node. A :class:`Delete` is a ``del`` statement this is deleting something. >>> node = astroid.extract_node('del self.attr') >>> node """ _astroid_fields = ('targets',) targets = None """What is being deleted. :type: list(NodeNG) or None """ def postinit(self, targets=None): """Do some setup after initialisation. :param targets: What is being deleted. :type targets: list(NodeNG) or None """ self.targets = targets def get_children(self): yield from self.targets class Dict(NodeNG, bases.Instance): """Class representing an :class:`ast.Dict` node. A :class:`Dict` is a dictionary that is created with ``{}`` syntax. >>> node = astroid.extract_node('{1: "1"}') >>> node """ _astroid_fields = ('items',) def __init__(self, lineno=None, col_offset=None, parent=None): """ :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.items = [] """The key-value pairs contained in the dictionary. :type: list(tuple(NodeNG, NodeNG)) """ super(Dict, self).__init__(lineno, col_offset, parent) def postinit(self, items): """Do some setup after initialisation. :param items: The key-value pairs contained in the dictionary. :type items: list(tuple(NodeNG, NodeNG)) """ self.items = items @classmethod def from_constants(cls, items=None): """Create a :class:`Dict` of constants from a live dictionary. :param items: The items to store in the node. :type items: dict :returns: The created dictionary node. 
:rtype: Dict """ node = cls() if items is None: node.items = [] else: node.items = [(const_factory(k), const_factory(v)) for k, v in items.items()] return node def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type. :rtype: str """ return '%s.dict' % BUILTINS def get_children(self): """Get the key and value nodes below this node. Children are returned in the order that they are defined in the source code, key first then the value. :returns: The children. :rtype: iterable(NodeNG) """ for key, value in self.items: yield key yield value def last_child(self): """An optimized version of list(get_children())[-1] :returns: The last child, or None if no children exist. :rtype: NodeNG or None """ if self.items: return self.items[-1][1] return None def itered(self): """An iterator over the keys this node contains. :returns: The keys of this node. :rtype: iterable(NodeNG) """ return [key for (key, _) in self.items] def getitem(self, index, context=None): """Get an item from this node. :param index: The node to use as a subscript index. :type index: Const or Slice :raises AstroidTypeError: When the given index cannot be used as a subscript index, or if this node is not subscriptable. :raises AstroidIndexError: If the given index does not exist in the dictionary. """ for key, value in self.items: # TODO(cpopa): no support for overriding yet, {1:2, **{1: 3}}. if isinstance(key, DictUnpack): try: return value.getitem(index, context) except (exceptions.AstroidTypeError, exceptions.AstroidIndexError): continue for inferredkey in key.infer(context): if inferredkey is util.Uninferable: continue if isinstance(inferredkey, Const) and isinstance(index, Const): if inferredkey.value == index.value: return value raise exceptions.AstroidIndexError(index) def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. :rtype: bool """ return bool(self.items) class Expr(Statement): """Class representing an :class:`ast.Expr` node. An :class:`Expr` is any expression that does not have its value used or stored. >>> node = astroid.extract_node('method()') >>> node >>> node.parent """ _astroid_fields = ('value',) value = None """What the expression does. :type: NodeNG or None """ def postinit(self, value=None): """Do some setup after initialisation. :param value: What the expression does. :type value: NodeNG or None """ self.value = value def get_children(self): yield self.value def _get_yield_nodes_skip_lambdas(self): if not self.value.is_lambda: yield from self.value._get_yield_nodes_skip_lambdas() class Ellipsis(mixins.NoChildrenMixin, NodeNG): # pylint: disable=redefined-builtin """Class representing an :class:`ast.Ellipsis` node. An :class:`Ellipsis` is the ``...`` syntax. >>> node = astroid.extract_node('...') >>> node """ def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. For an :class:`Ellipsis` this is always ``True``. :rtype: bool """ return True class EmptyNode(mixins.NoChildrenMixin, NodeNG): """Holds an arbitrary object in the :attr:`LocalsDictNodeNG.locals`.""" object = None class ExceptHandler(mixins.MultiLineBlockMixin, mixins.AssignTypeMixin, Statement): """Class representing an :class:`ast.ExceptHandler`. node. An :class:`ExceptHandler` is an ``except`` block on a try-except. 
>>> node = astroid.extract_node(''' try: do_something() except Exception as error: print("Error!") ''') >>> node >>> >>> node.handlers [] """ _astroid_fields = ('type', 'name', 'body',) _multi_line_block_fields = ('body',) type = None """The types that the block handles. :type: Tuple or NodeNG or None """ name = None """The name that the caught exception is assigned to. :type: AssignName or None """ body = None """The contents of the block. :type: list(NodeNG) or None """ def get_children(self): if self.type is not None: yield self.type if self.name is not None: yield self.name yield from self.body # pylint: disable=redefined-builtin; had to use the same name as builtin ast module. def postinit(self, type=None, name=None, body=None): """Do some setup after initialisation. :param type: The types that the block handles. :type type: Tuple or NodeNG or None :param name: The name that the caught exception is assigned to. :type name: AssignName or None :param body:The contents of the block. :type body: list(NodeNG) or None """ self.type = type self.name = name self.body = body @decorators.cachedproperty def blockstart_tolineno(self): """The line on which the beginning of this block ends. :type: int """ if self.name: return self.name.tolineno if self.type: return self.type.tolineno return self.lineno def catch(self, exceptions): # pylint: disable=redefined-outer-name """Check if this node handles any of the given exceptions. If ``exceptions`` is empty, this will default to ``True``. :param exceptions: The name of the exceptions to check for. :type exceptions: list(str) """ if self.type is None or exceptions is None: return True for node in self.type._get_name_nodes(): if node.name in exceptions: return True return False class Exec(Statement): """Class representing the ``exec`` statement. >>> node = astroid.extract_node('exec "True"') >>> node """ _astroid_fields = ('expr', 'globals', 'locals',) expr = None """The expression to be executed. :type: NodeNG or None """ globals = None """The globals dictionary to execute with. :type: NodeNG or None """ locals = None """The locals dictionary to execute with. :type: NodeNG or None """ # pylint: disable=redefined-builtin; had to use the same name as builtin ast module. def postinit(self, expr=None, globals=None, locals=None): """Do some setup after initialisation. :param expr: The expression to be executed. :type expr: NodeNG or None :param globals:The globals dictionary to execute with. :type globals: NodeNG or None :param locals: The locals dictionary to execute with. :type locals: NodeNG or None """ self.expr = expr self.globals = globals self.locals = locals class ExtSlice(NodeNG): """Class representing an :class:`ast.ExtSlice` node. An :class:`ExtSlice` is a complex slice expression. >>> node = astroid.extract_node('l[1:3, 5]') >>> node >>> node.slice """ _astroid_fields = ('dims',) dims = None """The simple dimensions that form the complete slice. :type: list(NodeNG) or None """ def postinit(self, dims=None): """Do some setup after initialisation. :param dims: The simple dimensions that form the complete slice. :type dims: list(NodeNG) or None """ self.dims = dims class For(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, mixins.AssignTypeMixin, Statement): """Class representing an :class:`ast.For` node. 
>>> node = astroid.extract_node('for thing in things: print(thing)') >>> node """ _astroid_fields = ('target', 'iter', 'body', 'orelse',) _other_other_fields = ('type_annotation',) _multi_line_block_fields = ('body', 'orelse') target = None """What the loop assigns to. :type: NodeNG or None """ iter = None """What the loop iterates over. :type: NodeNG or None """ body = None """The contents of the body of the loop. :type: list(NodeNG) or None """ orelse = None """The contents of the ``else`` block of the loop. :type: list(NodeNG) or None """ type_annotation = None """If present, this will contain the type annotation passed by a type comment :type: NodeNG or None """ # pylint: disable=redefined-builtin; had to use the same name as builtin ast module. def postinit(self, target=None, iter=None, body=None, orelse=None, type_annotation=None): """Do some setup after initialisation. :param target: What the loop assigns to. :type target: NodeNG or None :param iter: What the loop iterates over. :type iter: NodeNG or None :param body: The contents of the body of the loop. :type body: list(NodeNG) or None :param orelse: The contents of the ``else`` block of the loop. :type orelse: list(NodeNG) or None """ self.target = target self.iter = iter self.body = body self.orelse = orelse self.type_annotation = type_annotation optional_assign = True """Whether this node optionally assigns a variable. This is always ``True`` for :class:`For` nodes. :type: bool """ @decorators.cachedproperty def blockstart_tolineno(self): """The line on which the beginning of this block ends. :type: int """ return self.iter.tolineno def get_children(self): yield self.target yield self.iter yield from self.body yield from self.orelse class AsyncFor(For): """Class representing an :class:`ast.AsyncFor` node. An :class:`AsyncFor` is an asynchronous :class:`For` built with the ``async`` keyword. >>> node = astroid.extract_node(''' async def func(things): async for thing in things: print(thing) ''') >>> node >>> node.body[0] """ class Await(NodeNG): """Class representing an :class:`ast.Await` node. An :class:`Await` is the ``await`` keyword. >>> node = astroid.extract_node(''' async def func(things): await other_func() ''') >>> node >>> node.body[0] >>> list(node.body[0].get_children())[0] """ _astroid_fields = ('value', ) value = None """What to wait for. :type: NodeNG or None """ def postinit(self, value=None): """Do some setup after initialisation. :param value: What to wait for. :type value: NodeNG or None """ self.value = value def get_children(self): yield self.value class ImportFrom(mixins.NoChildrenMixin, mixins.ImportFromMixin, Statement): """Class representing an :class:`ast.ImportFrom` node. >>> node = astroid.extract_node('from my_package import my_module') >>> node """ _other_fields = ('modname', 'names', 'level') def __init__(self, fromname, names, level=0, lineno=None, col_offset=None, parent=None): """ :param fromname: The module that is being imported from. :type fromname: str or None :param names: What is being imported from the module. :type names: list(tuple(str, str or None)) :param level: The level of relative import. :type level: int :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.modname = fromname """The module that is being imported from. 
This is ``None`` for relative imports. :type: str or None """ self.names = names """What is being imported from the module. Each entry is a :class:`tuple` of the name being imported, and the alias that the name is assigned to (if any). :type: list(tuple(str, str or None)) """ self.level = level """The level of relative import. Essentially this is the number of dots in the import. This is always 0 for absolute imports. :type: int """ super(ImportFrom, self).__init__(lineno, col_offset, parent) class Attribute(NodeNG): """Class representing an :class:`ast.Attribute` node.""" _astroid_fields = ('expr',) _other_fields = ('attrname',) expr = None """The name that this node represents. :type: Name or None """ def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None): """ :param attrname: The name of the attribute. :type attrname: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.attrname = attrname """The name of the attribute. :type: str or None """ super(Attribute, self).__init__(lineno, col_offset, parent) def postinit(self, expr=None): """Do some setup after initialisation. :param expr: The name that this node represents. :type expr: Name or None """ self.expr = expr def get_children(self): yield self.expr class Global(mixins.NoChildrenMixin, Statement): """Class representing an :class:`ast.Global` node. >>> node = astroid.extract_node('global a_global') >>> node """ _other_fields = ('names',) def __init__(self, names, lineno=None, col_offset=None, parent=None): """ :param names: The names being declared as global. :type names: list(str) :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.names = names """The names being declared as global. :type: list(str) """ super(Global, self).__init__(lineno, col_offset, parent) def _infer_name(self, frame, name): return name class If(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, Statement): """Class representing an :class:`ast.If` node. >>> node = astroid.extract_node('if condition: print(True)') >>> node """ _astroid_fields = ('test', 'body', 'orelse') _multi_line_block_fields = ('body', 'orelse') test = None """The condition that the statement tests. :type: NodeNG or None """ body = None """The contents of the block. :type: list(NodeNG) or None """ orelse = None """The contents of the ``else`` block. :type: list(NodeNG) or None """ def postinit(self, test=None, body=None, orelse=None): """Do some setup after initialisation. :param test: The condition that the statement tests. :type test: NodeNG or None :param body: The contents of the block. :type body: list(NodeNG) or None :param orelse: The contents of the ``else`` block. :type orelse: list(NodeNG) or None """ self.test = test self.body = body self.orelse = orelse @decorators.cachedproperty def blockstart_tolineno(self): """The line on which the beginning of this block ends. :type: int """ return self.test.tolineno def block_range(self, lineno): """Get a range from the given line number to where this node ends. :param lineno: The line number to start the range at. 
:type lineno: int :returns: The range of line numbers that this node belongs to, starting at the given line number. :rtype: tuple(int, int) """ if lineno == self.body[0].fromlineno: return lineno, lineno if lineno <= self.body[-1].tolineno: return lineno, self.body[-1].tolineno return self._elsed_block_range(lineno, self.orelse, self.body[0].fromlineno - 1) def get_children(self): yield self.test yield from self.body yield from self.orelse def has_elif_block(self): return len(self.orelse) == 1 and isinstance(self.orelse[0], If) class IfExp(NodeNG): """Class representing an :class:`ast.IfExp` node. >>> node = astroid.extract_node('value if condition else other') >>> node """ _astroid_fields = ('test', 'body', 'orelse') test = None """The condition that the statement tests. :type: NodeNG or None """ body = None """The contents of the block. :type: list(NodeNG) or None """ orelse = None """The contents of the ``else`` block. :type: list(NodeNG) or None """ def postinit(self, test=None, body=None, orelse=None): """Do some setup after initialisation. :param test: The condition that the statement tests. :type test: NodeNG or None :param body: The contents of the block. :type body: list(NodeNG) or None :param orelse: The contents of the ``else`` block. :type orelse: list(NodeNG) or None """ self.test = test self.body = body self.orelse = orelse def get_children(self): yield self.test yield self.body yield self.orelse def op_left_associative(self): # `1 if True else 2 if False else 3` is parsed as # `1 if True else (2 if False else 3)` return False class Import(mixins.NoChildrenMixin, mixins.ImportFromMixin, Statement): """Class representing an :class:`ast.Import` node. >>> node = astroid.extract_node('import astroid') >>> node """ _other_fields = ('names',) def __init__(self, names=None, lineno=None, col_offset=None, parent=None): """ :param names: The names being imported. :type names: list(tuple(str, str or None)) or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.names = names """The names being imported. Each entry is a :class:`tuple` of the name being imported, and the alias that the name is assigned to (if any). :type: list(tuple(str, str or None)) or None """ super(Import, self).__init__(lineno, col_offset, parent) class Index(NodeNG): """Class representing an :class:`ast.Index` node. An :class:`Index` is a simple subscript. >>> node = astroid.extract_node('things[1]') >>> node >>> node.slice """ _astroid_fields = ('value',) value = None """The value to subscript with. :type: NodeNG or None """ def postinit(self, value=None): """Do some setup after initialisation. :param value: The value to subscript with. :type value: NodeNG or None """ self.value = value def get_children(self): yield self.value class Keyword(NodeNG): """Class representing an :class:`ast.keyword` node. >>> node = astroid.extract_node('function(a_kwarg=True)') >>> node >>> node.keywords [] """ _astroid_fields = ('value',) _other_fields = ('arg',) value = None """The value being assigned to the keyword argument. :type: NodeNG or None """ def __init__(self, arg=None, lineno=None, col_offset=None, parent=None): """ :param arg: The argument being assigned to. :type arg: Name or None :param lineno: The line that this node appears on in the source code. 
:type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.arg = arg """The argument being assigned to. :type: Name or None """ super(Keyword, self).__init__(lineno, col_offset, parent) def postinit(self, value=None): """Do some setup after initialisation. :param value: The value being assigned to the keyword argument. :type value: NodeNG or None """ self.value = value def get_children(self): yield self.value class List(_BaseContainer): """Class representing an :class:`ast.List` node. >>> node = astroid.extract_node('[1, 2, 3]') >>> node """ _other_fields = ('ctx',) def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None): """ :param ctx: Whether the list is assigned to or loaded from. :type ctx: Context or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.ctx = ctx """Whether the list is assigned to or loaded from. :type: Context or None """ super(List, self).__init__(lineno, col_offset, parent) def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type. :rtype: str """ return '%s.list' % BUILTINS def getitem(self, index, context=None): """Get an item from this node. :param index: The node to use as a subscript index. :type index: Const or Slice """ return _container_getitem(self, self.elts, index, context=context) class Nonlocal(mixins.NoChildrenMixin, Statement): """Class representing an :class:`ast.Nonlocal` node. >>> node = astroid.extract_node(''' def function(): nonlocal var ''') >>> node >>> node.body[0] """ _other_fields = ('names',) def __init__(self, names, lineno=None, col_offset=None, parent=None): """ :param names: The names being declared as not local. :type names: list(str) :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.names = names """The names being declared as not local. :type: list(str) """ super(Nonlocal, self).__init__(lineno, col_offset, parent) def _infer_name(self, frame, name): return name class Pass(mixins.NoChildrenMixin, Statement): """Class representing an :class:`ast.Pass` node. >>> node = astroid.extract_node('pass') >>> node """ class Print(Statement): """Class representing an :class:`ast.Print` node. >>> node = astroid.extract_node('print "A message"') >>> node """ _astroid_fields = ('dest', 'values',) dest = None """Where to print to. :type: NodeNG or None """ values = None """What to print. :type: list(NodeNG) or None """ def __init__(self, nl=None, lineno=None, col_offset=None, parent=None): """ :param nl: Whether to print a new line. :type nl: bool or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.nl = nl """Whether to print a new line.
:type: bool or None """ super(Print, self).__init__(lineno, col_offset, parent) def postinit(self, dest=None, values=None): """Do some setup after initialisation. :param dest: Where to print to. :type dest: NodeNG or None :param values: What to print. :type values: list(NodeNG) or None """ self.dest = dest self.values = values class Raise(Statement): """Class representing an :class:`ast.Raise` node. >>> node = astroid.extract_node('raise RuntimeError("Something bad happened!")') >>> node """ exc = None """What is being raised. :type: NodeNG or None """ _astroid_fields = ('exc', 'cause') cause = None """The exception being used to raise this one. :type: NodeNG or None """ def postinit(self, exc=None, cause=None): """Do some setup after initialisation. :param exc: What is being raised. :type exc: NodeNG or None :param cause: The exception being used to raise this one. :type cause: NodeNG or None """ self.exc = exc self.cause = cause def raises_not_implemented(self): """Check if this node raises a :class:`NotImplementedError`. :returns: True if this node raises a :class:`NotImplementedError`, False otherwise. :rtype: bool """ if not self.exc: return False for name in self.exc._get_name_nodes(): if name.name == 'NotImplementedError': return True return False def get_children(self): if self.exc is not None: yield self.exc if self.cause is not None: yield self.cause class Return(Statement): """Class representing an :class:`ast.Return` node. >>> node = astroid.extract_node('return True') >>> node """ _astroid_fields = ('value',) value = None """The value being returned. :type: NodeNG or None """ def postinit(self, value=None): """Do some setup after initialisation. :param value: The value being returned. :type value: NodeNG or None """ self.value = value def get_children(self): if self.value is not None: yield self.value def is_tuple_return(self): return isinstance(self.value, Tuple) def _get_return_nodes_skip_functions(self): yield self class Set(_BaseContainer): """Class representing an :class:`ast.Set` node. >>> node = astroid.extract_node('{1, 2, 3}') >>> node """ def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type. :rtype: str """ return '%s.set' % BUILTINS class Slice(NodeNG): """Class representing an :class:`ast.Slice` node. >>> node = astroid.extract_node('things[1:3]') >>> node >>> node.slice """ _astroid_fields = ('lower', 'upper', 'step') lower = None """The lower index in the slice. :type: NodeNG or None """ upper = None """The upper index in the slice. :type: NodeNG or None """ step = None """The step to take between indexes. :type: NodeNG or None """ def postinit(self, lower=None, upper=None, step=None): """Do some setup after initialisation. :param lower: The lower index in the slice. :type lower: NodeNG or None :param upper: The upper index in the slice. :type upper: NodeNG or None :param step: The step to take between indexes. :type step: NodeNG or None """ self.lower = lower self.upper = upper self.step = step def _wrap_attribute(self, attr): """Wrap the empty attributes of the Slice in a Const node.""" if not attr: const = const_factory(attr) const.parent = self return const return attr @decorators.cachedproperty def _proxied(self): builtins = MANAGER.astroid_cache[BUILTINS] return builtins.getattr('slice')[0] def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type.
:rtype: str """ return '%s.slice' % BUILTINS def igetattr(self, attrname, context=None): """Infer the possible values of the given attribute on the slice. :param attrname: The name of the attribute to infer. :type attrname: str :returns: The inferred possible values. :rtype: iterable(NodeNG) """ if attrname == 'start': yield self._wrap_attribute(self.lower) elif attrname == 'stop': yield self._wrap_attribute(self.upper) elif attrname == 'step': yield self._wrap_attribute(self.step) else: yield from self.getattr(attrname, context=context) def getattr(self, attrname, context=None): return self._proxied.getattr(attrname, context) def get_children(self): if self.lower is not None: yield self.lower if self.upper is not None: yield self.upper if self.step is not None: yield self.step class Starred(mixins.ParentAssignTypeMixin, NodeNG): """Class representing an :class:`ast.Starred` node. >>> node = astroid.extract_node('*args') >>> node """ _astroid_fields = ('value',) _other_fields = ('ctx', ) value = None """What is being unpacked. :type: NodeNG or None """ def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None): """ :param ctx: Whether the list is assigned to or loaded from. :type ctx: Context or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.ctx = ctx """Whether the starred item is assigned to or loaded from. :type: Context or None """ super(Starred, self).__init__(lineno=lineno, col_offset=col_offset, parent=parent) def postinit(self, value=None): """Do some setup after initialisation. :param value: What is being unpacked. :type value: NodeNG or None """ self.value = value def get_children(self): yield self.value class Subscript(NodeNG): """Class representing an :class:`ast.Subscript` node. >>> node = astroid.extract_node('things[1:3]') >>> node """ _astroid_fields = ('value', 'slice') _other_fields = ('ctx', ) value = None """What is being indexed. :type: NodeNG or None """ slice = None """The slice being used to lookup. :type: NodeNG or None """ def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None): """ :param ctx: Whether the subscripted item is assigned to or loaded from. :type ctx: Context or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.ctx = ctx """Whether the subscripted item is assigned to or loaded from. :type: Context or None """ super(Subscript, self).__init__(lineno=lineno, col_offset=col_offset, parent=parent) # pylint: disable=redefined-builtin; had to use the same name as builtin ast module. def postinit(self, value=None, slice=None): """Do some setup after initialisation. :param value: What is being indexed. :type value: NodeNG or None :param slice: The slice being used to lookup. :type slice: NodeNG or None """ self.value = value self.slice = slice def get_children(self): yield self.value yield self.slice class TryExcept(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, Statement): """Class representing an :class:`ast.TryExcept` node. 
>>> node = astroid.extract_node(''' try: do_something() except Exception as error: print("Error!") ''') >>> node """ _astroid_fields = ('body', 'handlers', 'orelse',) _multi_line_block_fields = ('body', 'handlers', 'orelse') body = None """The contents of the block to catch exceptions from. :type: list(NodeNG) or None """ handlers = None """The exception handlers. :type: list(ExceptHandler) or None """ orelse = None """The contents of the ``else`` block. :type: list(NodeNG) or None """ def postinit(self, body=None, handlers=None, orelse=None): """Do some setup after initialisation. :param body: The contents of the block to catch exceptions from. :type body: list(NodeNG) or None :param handlers: The exception handlers. :type handlers: list(ExceptHandler) or None :param orelse: The contents of the ``else`` block. :type orelse: list(NodeNG) or None """ self.body = body self.handlers = handlers self.orelse = orelse def _infer_name(self, frame, name): return name def block_range(self, lineno): """Get a range from the given line number to where this node ends. :param lineno: The line number to start the range at. :type lineno: int :returns: The range of line numbers that this node belongs to, starting at the given line number. :rtype: tuple(int, int) """ last = None for exhandler in self.handlers: if exhandler.type and lineno == exhandler.type.fromlineno: return lineno, lineno if exhandler.body[0].fromlineno <= lineno <= exhandler.body[-1].tolineno: return lineno, exhandler.body[-1].tolineno if last is None: last = exhandler.body[0].fromlineno - 1 return self._elsed_block_range(lineno, self.orelse, last) def get_children(self): yield from self.body yield from self.handlers or () yield from self.orelse or () class TryFinally(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, Statement): """Class representing an :class:`ast.TryFinally` node. >>> node = astroid.extract_node(''' try: do_something() except Exception as error: print("Error!") finally: print("Cleanup!") ''') >>> node """ _astroid_fields = ('body', 'finalbody',) _multi_line_block_fields = ('body', 'finalbody') body = None """The try-except that the finally is attached to. :type: list(TryExcept) or None """ finalbody = None """The contents of the ``finally`` block. :type: list(NodeNG) or None """ def postinit(self, body=None, finalbody=None): """Do some setup after initialisation. :param body: The try-except that the finally is attached to. :type body: list(TryExcept) or None :param finalbody: The contents of the ``finally`` block. :type finalbody: list(NodeNG) or None """ self.body = body self.finalbody = finalbody def block_range(self, lineno): """Get a range from the given line number to where this node ends. :param lineno: The line number to start the range at. :type lineno: int :returns: The range of line numbers that this node belongs to, starting at the given line number. :rtype: tuple(int, int) """ child = self.body[0] # py2.5 try: except: finally: if (isinstance(child, TryExcept) and child.fromlineno == self.fromlineno and lineno > self.fromlineno and lineno <= child.tolineno): return child.block_range(lineno) return self._elsed_block_range(lineno, self.finalbody) def get_children(self): yield from self.body yield from self.finalbody class Tuple(_BaseContainer): """Class representing an :class:`ast.Tuple` node. >>> node = astroid.extract_node('(1, 2, 3)') >>> node """ _other_fields = ('ctx',) def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None): """ :param ctx: Whether the tuple is assigned to or loaded from. 
:type ctx: Context or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.ctx = ctx """Whether the tuple is assigned to or loaded from. :type: Context or None """ super(Tuple, self).__init__(lineno, col_offset, parent) def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type. :rtype: str """ return '%s.tuple' % BUILTINS def getitem(self, index, context=None): """Get an item from this node. :param index: The node to use as a subscript index. :type index: Const or Slice """ return _container_getitem(self, self.elts, index, context=context) class UnaryOp(NodeNG): """Class representing an :class:`ast.UnaryOp` node. >>> node = astroid.extract_node('-5') >>> node """ _astroid_fields = ('operand',) _other_fields = ('op',) operand = None """What the unary operator is applied to. :type: NodeNG or None """ def __init__(self, op=None, lineno=None, col_offset=None, parent=None): """ :param op: The operator. :type: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.op = op """The operator. :type: str or None """ super(UnaryOp, self).__init__(lineno, col_offset, parent) def postinit(self, operand=None): """Do some setup after initialisation. :param operand: What the unary operator is applied to. :type operand: NodeNG or None """ self.operand = operand # This is set by inference.py def _infer_unaryop(self, context=None): raise NotImplementedError def type_errors(self, context=None): """Get a list of type errors which can occur during inference. Each TypeError is represented by a :class:`BadBinaryOperationMessage`, which holds the original exception. :returns: The list of possible type errors. :rtype: list(BadBinaryOperationMessage) """ try: results = self._infer_unaryop(context=context) return [result for result in results if isinstance(result, util.BadUnaryOperationMessage)] except exceptions.InferenceError: return [] def get_children(self): yield self.operand def op_precedence(self): if self.op == 'not': return OP_PRECEDENCE[self.op] return super().op_precedence() class While(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, Statement): """Class representing an :class:`ast.While` node. >>> node = astroid.extract_node(''' while condition(): print("True") ''') >>> node """ _astroid_fields = ('test', 'body', 'orelse',) _multi_line_block_fields = ('body', 'orelse') test = None """The condition that the loop tests. :type: NodeNG or None """ body = None """The contents of the loop. :type: list(NodeNG) or None """ orelse = None """The contents of the ``else`` block. :type: list(NodeNG) or None """ def postinit(self, test=None, body=None, orelse=None): """Do some setup after initialisation. :param test: The condition that the loop tests. :type test: NodeNG or None :param body: The contents of the loop. :type body: list(NodeNG) or None :param orelse: The contents of the ``else`` block. 
:type orelse: list(NodeNG) or None """ self.test = test self.body = body self.orelse = orelse @decorators.cachedproperty def blockstart_tolineno(self): """The line on which the beginning of this block ends. :type: int """ return self.test.tolineno def block_range(self, lineno): """Get a range from the given line number to where this node ends. :param lineno: The line number to start the range at. :type lineno: int :returns: The range of line numbers that this node belongs to, starting at the given line number. :rtype: tuple(int, int) """ return self. _elsed_block_range(lineno, self.orelse) def get_children(self): yield self.test yield from self.body yield from self.orelse class With(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, mixins.AssignTypeMixin, Statement): """Class representing an :class:`ast.With` node. >>> node = astroid.extract_node(''' with open(file_path) as file_: print(file_.read()) ''') >>> node """ _astroid_fields = ('items', 'body',) _other_other_fields = ('type_annotation',) _multi_line_block_fields = ('body',) items = None """The pairs of context managers and the names they are assigned to. :type: list(tuple(NodeNG, AssignName or None)) or None """ body = None """The contents of the ``with`` block. :type: list(NodeNG) or None """ type_annotation = None """If present, this will contain the type annotation passed by a type comment :type: NodeNG or None """ def postinit(self, items=None, body=None, type_annotation=None): """Do some setup after initialisation. :param items: The pairs of context managers and the names they are assigned to. :type items: list(tuple(NodeNG, AssignName or None)) or None :param body: The contents of the ``with`` block. :type body: list(NodeNG) or None """ self.items = items self.body = body self.type_annotation = type_annotation @decorators.cachedproperty def blockstart_tolineno(self): """The line on which the beginning of this block ends. :type: int """ return self.items[-1][0].tolineno def get_children(self): """Get the child nodes below this node. :returns: The children. :rtype: iterable(NodeNG) """ for expr, var in self.items: yield expr if var: yield var yield from self.body class AsyncWith(With): """Asynchronous ``with`` built with the ``async`` keyword.""" class Yield(NodeNG): """Class representing an :class:`ast.Yield` node. >>> node = astroid.extract_node('yield True') >>> node """ _astroid_fields = ('value',) value = None """The value to yield. :type: NodeNG or None """ def postinit(self, value=None): """Do some setup after initialisation. :param value: The value to yield. :type value: NodeNG or None """ self.value = value def get_children(self): if self.value is not None: yield self.value def _get_yield_nodes_skip_lambdas(self): yield self class YieldFrom(Yield): """Class representing an :class:`ast.YieldFrom` node.""" class DictUnpack(mixins.NoChildrenMixin, NodeNG): """Represents the unpacking of dicts into dicts using :pep:`448`.""" class FormattedValue(NodeNG): """Class representing an :class:`ast.FormattedValue` node. Represents a :pep:`498` format string. >>> node = astroid.extract_node('f"Format {type_}"') >>> node >>> node.values [, ] """ _astroid_fields = ('value', 'format_spec') value = None """The value to be formatted into the string. :type: NodeNG or None """ conversion = None """The type of formatting to be applied to the value. .. seealso:: :class:`ast.FormattedValue` :type: int or None """ format_spec = None """The formatting to be applied to the value. .. 
seealso:: :class:`ast.FormattedValue` :type: JoinedStr or None """ def postinit(self, value, conversion=None, format_spec=None): """Do some setup after initialisation. :param value: The value to be formatted into the string. :type value: NodeNG :param conversion: The type of formatting to be applied to the value. :type conversion: int or None :param format_spec: The formatting to be applied to the value. :type format_spec: JoinedStr or None """ self.value = value self.conversion = conversion self.format_spec = format_spec def get_children(self): yield self.value if self.format_spec is not None: yield self.format_spec class JoinedStr(NodeNG): """Represents a list of string expressions to be joined. >>> node = astroid.extract_node('f"Format {type_}"') >>> node """ _astroid_fields = ('values',) values = None """The string expressions to be joined. :type: list(FormattedValue or Const) or None """ def postinit(self, values=None): """Do some setup after initialisation. :param value: The string expressions to be joined. :type: list(FormattedValue or Const) or None """ self.values = values def get_children(self): yield from self.values class Unknown(mixins.AssignTypeMixin, NodeNG): """This node represents a node in a constructed AST where introspection is not possible. At the moment, it's only used in the args attribute of FunctionDef nodes where function signature introspection failed. """ name = "Unknown" def qname(self): return "Unknown" def infer(self, context=None, **kwargs): """Inference on an Unknown node immediately terminates.""" yield util.Uninferable # constants ############################################################## CONST_CLS = { list: List, tuple: Tuple, dict: Dict, set: Set, type(None): Const, type(NotImplemented): Const, } def _update_const_classes(): """update constant classes, so the keys of CONST_CLS can be reused""" klasses = (bool, int, float, complex, str, bytes) for kls in klasses: CONST_CLS[kls] = Const _update_const_classes() def _two_step_initialization(cls, value): instance = cls() instance.postinit(value) return instance def _dict_initialization(cls, value): if isinstance(value, dict): value = tuple(value.items()) return _two_step_initialization(cls, value) _CONST_CLS_CONSTRUCTORS = { List: _two_step_initialization, Tuple: _two_step_initialization, Dict: _dict_initialization, Set: _two_step_initialization, Const: lambda cls, value: cls(value) } def const_factory(value): """return an astroid node for a python value""" # XXX we should probably be stricter here and only consider stuff in # CONST_CLS or do better treatment: in case where value is not in CONST_CLS, # we should rather recall the builder on this value than returning an empty # node (another option being that const_factory shouldn't be called with something # not in CONST_CLS) assert not isinstance(value, NodeNG) # Hack for ignoring elements of a sequence # or a mapping, in order to avoid transforming # each element to an AST. This is fixed in 2.0 # and this approach is a temporary hack. 
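# Added illustration (not part of the original module): given the tables
# above, const_factory(1) returns a Const node wrapping the integer, while
# const_factory([1, 2]) returns a List node whose ``elts`` are deliberately
# left empty because of the hack described in the preceding comment.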
if isinstance(value, (list, set, tuple, dict)): elts = [] else: elts = value try: initializer_cls = CONST_CLS[value.__class__] initializer = _CONST_CLS_CONSTRUCTORS[initializer_cls] return initializer(initializer_cls, elts) except (KeyError, AttributeError): node = EmptyNode() node.object = value return node def is_from_decorator(node): """Return True if the given node is the child of a decorator""" parent = node.parent while parent is not None: if isinstance(parent, Decorators): return True parent = parent.parent return False astroid-2.0.1/astroid/nodes.py0000644000076500000240000000472113324063433017064 0ustar claudiustaff00000000000000# Copyright (c) 2006-2011, 2013 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2010 Daniel Harding # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Jared Garst # Copyright (c) 2017 Ashley Whetter # Copyright (c) 2017 rr- # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Every available node class. .. seealso:: :doc:`ast documentation ` All nodes inherit from :class:`~astroid.node_classes.NodeNG`. """ # pylint: disable=unused-import,redefined-builtin from astroid.node_classes import ( Arguments, AssignAttr, Assert, Assign, AnnAssign, AssignName, AugAssign, Repr, BinOp, BoolOp, Break, Call, Compare, Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, Dict, Expr, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, ImportFrom, Attribute, Global, If, IfExp, Import, Index, Keyword, List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, const_factory, AsyncFor, Await, AsyncWith, FormattedValue, JoinedStr, # Node not present in the builtin ast module. DictUnpack, Unknown, ) from astroid.scoped_nodes import ( Module, GeneratorExp, Lambda, DictComp, ListComp, SetComp, FunctionDef, ClassDef, AsyncFunctionDef, ) ALL_NODE_CLASSES = ( AsyncFunctionDef, AsyncFor, AsyncWith, Await, Arguments, AssignAttr, Assert, Assign, AnnAssign, AssignName, AugAssign, Repr, BinOp, BoolOp, Break, Call, ClassDef, Compare, Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, Dict, DictComp, DictUnpack, Expr, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, ImportFrom, FunctionDef, Attribute, GeneratorExp, Global, If, IfExp, Import, Index, Keyword, Lambda, List, ListComp, Name, Nonlocal, Module, Pass, Print, Raise, Return, Set, SetComp, Slice, Starred, Subscript, TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, FormattedValue, JoinedStr, ) astroid-2.0.1/astroid/objects.py0000644000076500000240000002015713324063433017406 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016, 2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """ Inference objects are a way to represent composite AST nodes, which are used only as inference results, so they can't be found in the original AST tree. 
For instance, inferring the following frozenset use, leads to an inferred FrozenSet: Call(func=Name('frozenset'), args=Tuple(...)) """ import builtins from astroid import bases from astroid import decorators from astroid import exceptions from astroid import MANAGER from astroid import node_classes from astroid import scoped_nodes from astroid import util BUILTINS = builtins.__name__ objectmodel = util.lazy_import('interpreter.objectmodel') class FrozenSet(node_classes._BaseContainer): """class representing a FrozenSet composite node""" def pytype(self): return '%s.frozenset' % BUILTINS def _infer(self, context=None): yield self @decorators.cachedproperty def _proxied(self): # pylint: disable=method-hidden ast_builtins = MANAGER.astroid_cache[BUILTINS] return ast_builtins.getattr('frozenset')[0] class Super(node_classes.NodeNG): """Proxy class over a super call. This class offers almost the same behaviour as Python's super, which is MRO lookups for retrieving attributes from the parents. The *mro_pointer* is the place in the MRO from where we should start looking, not counting it. *mro_type* is the object which provides the MRO, it can be both a type or an instance. *self_class* is the class where the super call is, while *scope* is the function where the super call is. """ # pylint: disable=unnecessary-lambda special_attributes = util.lazy_descriptor(lambda: objectmodel.SuperModel()) # pylint: disable=super-init-not-called def __init__(self, mro_pointer, mro_type, self_class, scope): self.type = mro_type self.mro_pointer = mro_pointer self._class_based = False self._self_class = self_class self._scope = scope def _infer(self, context=None): yield self def super_mro(self): """Get the MRO which will be used to lookup attributes in this super.""" if not isinstance(self.mro_pointer, scoped_nodes.ClassDef): raise exceptions.SuperError( "The first argument to super must be a subtype of " "type, not {mro_pointer}.", super_=self) if isinstance(self.type, scoped_nodes.ClassDef): # `super(type, type)`, most likely in a class method. self._class_based = True mro_type = self.type else: mro_type = getattr(self.type, '_proxied', None) if not isinstance(mro_type, (bases.Instance, scoped_nodes.ClassDef)): raise exceptions.SuperError( "The second argument to super must be an " "instance or subtype of type, not {type}.", super_=self) if not mro_type.newstyle: raise exceptions.SuperError("Unable to call super on old-style classes.", super_=self) mro = mro_type.mro() if self.mro_pointer not in mro: raise exceptions.SuperError( "The second argument to super must be an " "instance or subtype of type, not {type}.", super_=self) index = mro.index(self.mro_pointer) return mro[index + 1:] @decorators.cachedproperty def _proxied(self): ast_builtins = MANAGER.astroid_cache[BUILTINS] return ast_builtins.getattr('super')[0] def pytype(self): return '%s.super' % BUILTINS def display_type(self): return 'Super of' @property def name(self): """Get the name of the MRO pointer.""" return self.mro_pointer.name def qname(self): return "super" def igetattr(self, name, context=None): """Retrieve the inferred values of the given attribute name.""" if name in self.special_attributes: yield self.special_attributes.lookup(name) return try: mro = self.super_mro() # Don't let invalid MROs or invalid super calls # leak out as is from this function. 
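# Added note (not in the original source): both failure modes below are
# re-raised as AttributeInferenceError, so callers of igetattr() only have
# to handle a single exception type for failed attribute lookups.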
except exceptions.SuperError as exc: raise exceptions.AttributeInferenceError( ('Lookup for {name} on {target!r} because super call {super!r} ' 'is invalid.'), target=self, attribute=name, context=context, super_=exc.super_) from exc except exceptions.MroError as exc: raise exceptions.AttributeInferenceError( ('Lookup for {name} on {target!r} failed because {cls!r} has an ' 'invalid MRO.'), target=self, attribute=name, context=context, mros=exc.mros, cls=exc.cls) from exc found = False for cls in mro: if name not in cls.locals: continue found = True for inferred in bases._infer_stmts([cls[name]], context, frame=self): if not isinstance(inferred, scoped_nodes.FunctionDef): yield inferred continue # We can obtain different descriptors from a super depending # on what we are accessing and where the super call is. if inferred.type == 'classmethod': yield bases.BoundMethod(inferred, cls) elif self._scope.type == 'classmethod' and inferred.type == 'method': yield inferred elif self._class_based or inferred.type == 'staticmethod': yield inferred elif bases._is_property(inferred): # TODO: support other descriptors as well. for value in inferred.infer_call_result(self, context): yield value else: yield bases.BoundMethod(inferred, cls) if not found: raise exceptions.AttributeInferenceError(target=self, attribute=name, context=context) def getattr(self, name, context=None): return list(self.igetattr(name, context=context)) class ExceptionInstance(bases.Instance): """Class for instances of exceptions It has special treatment for some of the exceptions's attributes, which are transformed at runtime into certain concrete objects, such as the case of .args. """ # pylint: disable=unnecessary-lambda special_attributes = util.lazy_descriptor(lambda: objectmodel.ExceptionInstanceModel()) class DictInstance(bases.Instance): """Special kind of instances for dictionaries This instance knows the underlying object model of the dictionaries, which means that methods such as .values or .items can be properly inferred. """ # pylint: disable=unnecessary-lambda special_attributes = util.lazy_descriptor(lambda: objectmodel.DictModel()) # Custom objects tailored for dictionaries, which are used to # disambiguate between the types of Python 2 dict's method returns # and Python 3 (where they return set like objects). class DictItems(bases.Proxy): __str__ = node_classes.NodeNG.__str__ __repr__ = node_classes.NodeNG.__repr__ class DictKeys(bases.Proxy): __str__ = node_classes.NodeNG.__str__ __repr__ = node_classes.NodeNG.__repr__ class DictValues(bases.Proxy): __str__ = node_classes.NodeNG.__str__ __repr__ = node_classes.NodeNG.__repr__ # TODO: Hack to solve the circular import problem between node_classes and objects # This is not needed in 2.0, which has a cleaner design overall node_classes.Dict.__bases__ = (node_classes.NodeNG, DictInstance) astroid-2.0.1/astroid/protocols.py0000644000076500000240000006431213324063433020002 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. 
# Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Dmitry Pribysh # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2017-2018 Ashley Whetter # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017 rr- # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 HoverHell # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """this module contains a set of functions to handle python protocols for nodes where it makes sense. """ import collections import operator as operator_mod import sys from astroid import Store from astroid import arguments from astroid import bases from astroid import context as contextmod from astroid import exceptions from astroid import decorators from astroid import node_classes from astroid import helpers from astroid import nodes from astroid import util raw_building = util.lazy_import('raw_building') objects = util.lazy_import('objects') def _reflected_name(name): return "__r" + name[2:] def _augmented_name(name): return "__i" + name[2:] _CONTEXTLIB_MGR = 'contextlib.contextmanager' BIN_OP_METHOD = {'+': '__add__', '-': '__sub__', '/': '__truediv__', '//': '__floordiv__', '*': '__mul__', '**': '__pow__', '%': '__mod__', '&': '__and__', '|': '__or__', '^': '__xor__', '<<': '__lshift__', '>>': '__rshift__', '@': '__matmul__' } REFLECTED_BIN_OP_METHOD = { key: _reflected_name(value) for (key, value) in BIN_OP_METHOD.items() } AUGMENTED_OP_METHOD = { key + "=": _augmented_name(value) for (key, value) in BIN_OP_METHOD.items() } UNARY_OP_METHOD = {'+': '__pos__', '-': '__neg__', '~': '__invert__', 'not': None, # XXX not '__nonzero__' } _UNARY_OPERATORS = { '+': operator_mod.pos, '-': operator_mod.neg, '~': operator_mod.invert, 'not': operator_mod.not_, } def _infer_unary_op(obj, op): func = _UNARY_OPERATORS[op] value = func(obj) return nodes.const_factory(value) nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op) nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op) nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op) nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op) nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op) # Binary operations BIN_OP_IMPL = {'+': lambda a, b: a + b, '-': lambda a, b: a - b, '/': lambda a, b: a / b, '//': lambda a, b: a // b, '*': lambda a, b: a * b, '**': lambda a, b: a ** b, '%': lambda a, b: a % b, '&': lambda a, b: a & b, '|': lambda a, b: a | b, '^': lambda a, b: a ^ b, '<<': lambda a, b: a << b, '>>': lambda a, b: a >> b, } if sys.version_info >= (3, 5): # MatMult is available since Python 3.5+. BIN_OP_IMPL['@'] = operator_mod.matmul for _KEY, _IMPL in list(BIN_OP_IMPL.items()): BIN_OP_IMPL[_KEY + '='] = _IMPL @decorators.yes_if_nothing_inferred def const_infer_binary_op(self, opnode, operator, other, context, _): not_implemented = nodes.Const(NotImplemented) if isinstance(other, nodes.Const): try: impl = BIN_OP_IMPL[operator] try: yield nodes.const_factory(impl(self.value, other.value)) except TypeError: # ArithmeticError is not enough: float >> float is a TypeError yield not_implemented except Exception: # pylint: disable=broad-except yield util.Uninferable except TypeError: yield not_implemented elif isinstance(self.value, str) and operator == '%': # TODO(cpopa): implement string interpolation later on. 
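# Added note (not in the original source): e.g. inferring "%d items" % count
# with a non-constant right operand currently yields Uninferable instead of
# attempting to evaluate the interpolation.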
yield util.Uninferable else: yield not_implemented nodes.Const.infer_binary_op = const_infer_binary_op def _multiply_seq_by_int(self, opnode, other, context): node = self.__class__(parent=opnode) elts = [] filtered_elts = (elt for elt in self.elts if elt is not util.Uninferable) for elt in filtered_elts: infered = helpers.safe_infer(elt, context) if infered is None: infered = util.Uninferable elts.append(infered) node.elts = elts * other.value return node def _filter_uninferable_nodes(elts, context): for elt in elts: if elt is util.Uninferable: yield nodes.Unknown() else: for inferred in elt.infer(context): if inferred is not util.Uninferable: yield inferred else: yield nodes.Unknown() @decorators.yes_if_nothing_inferred def tl_infer_binary_op(self, opnode, operator, other, context, method): not_implemented = nodes.Const(NotImplemented) if isinstance(other, self.__class__) and operator == '+': node = self.__class__(parent=opnode) elts = list(_filter_uninferable_nodes(self.elts, context)) elts += list(_filter_uninferable_nodes(other.elts, context)) node.elts = elts yield node elif isinstance(other, nodes.Const) and operator == '*': if not isinstance(other.value, int): yield not_implemented return yield _multiply_seq_by_int(self, opnode, other, context) elif isinstance(other, bases.Instance) and operator == '*': # Verify if the instance supports __index__. as_index = helpers.class_instance_as_index(other) if not as_index: yield util.Uninferable else: yield _multiply_seq_by_int(self, opnode, as_index, context) else: yield not_implemented nodes.Tuple.infer_binary_op = tl_infer_binary_op nodes.List.infer_binary_op = tl_infer_binary_op @decorators.yes_if_nothing_inferred def instance_class_infer_binary_op(self, opnode, operator, other, context, method): return method.infer_call_result(self, context) bases.Instance.infer_binary_op = instance_class_infer_binary_op nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op # assignment ################################################################## """the assigned_stmts method is responsible to return the assigned statement (e.g. not inferred) according to the assignment type. The `asspath` argument is used to record the lhs path of the original node. For instance if we want assigned statements for 'c' in 'a, (b,c)', asspath will be [1, 1] once arrived to the Assign node. The `context` argument is the current inference context which should be given to any intermediary inference necessary. 
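Similarly, asking for the assigned statements of 'b' in 'a, (b, c)' would use the asspath [1, 0]: index 1 selects the nested tuple in the outer target and index 0 selects 'b' inside it.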
""" def _resolve_looppart(parts, asspath, context): """recursive function to resolve multiple assignments on loops""" asspath = asspath[:] index = asspath.pop(0) for part in parts: if part is util.Uninferable: continue # XXX handle __iter__ and log potentially detected errors if not hasattr(part, 'itered'): continue try: itered = part.itered() except TypeError: continue # XXX log error for stmt in itered: index_node = nodes.Const(index) try: assigned = stmt.getitem(index_node, context) except (AttributeError, exceptions.AstroidTypeError, exceptions.AstroidIndexError): continue if not asspath: # we achieved to resolved the assignment path, # don't infer the last part yield assigned elif assigned is util.Uninferable: break else: # we are not yet on the last part of the path # search on each possibly inferred value try: yield from _resolve_looppart(assigned.infer(context), asspath, context) except exceptions.InferenceError: break @decorators.raise_if_nothing_inferred def for_assigned_stmts(self, node=None, context=None, asspath=None): if isinstance(self, nodes.AsyncFor) or getattr(self, 'is_async', False): # Skip inferring of async code for now return dict(node=self, unknown=node, assign_path=asspath, context=context) if asspath is None: for lst in self.iter.infer(context): if isinstance(lst, (nodes.Tuple, nodes.List)): yield from lst.elts else: yield from _resolve_looppart(self.iter.infer(context), asspath, context) # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. return dict(node=self, unknown=node, assign_path=asspath, context=context) nodes.For.assigned_stmts = for_assigned_stmts nodes.Comprehension.assigned_stmts = for_assigned_stmts def sequence_assigned_stmts(self, node=None, context=None, asspath=None): if asspath is None: asspath = [] try: index = self.elts.index(node) except ValueError as exc: raise exceptions.InferenceError( 'Tried to retrieve a node {node!r} which does not exist', node=self, assign_path=asspath, context=context) from exc asspath.insert(0, index) return self.parent.assigned_stmts(node=self, context=context, asspath=asspath) nodes.Tuple.assigned_stmts = sequence_assigned_stmts nodes.List.assigned_stmts = sequence_assigned_stmts def assend_assigned_stmts(self, node=None, context=None, asspath=None): return self.parent.assigned_stmts(node=self, context=context) nodes.AssignName.assigned_stmts = assend_assigned_stmts nodes.AssignAttr.assigned_stmts = assend_assigned_stmts def _arguments_infer_argname(self, name, context): # arguments information may be missing, in which case we can't do anything # more if not (self.args or self.vararg or self.kwarg): yield util.Uninferable return # first argument of instance/class method if self.args and getattr(self.args[0], 'name', None) == name: functype = self.parent.type cls = self.parent.parent.scope() is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == 'metaclass' # If this is a metaclass, then the first argument will always # be the class, not an instance. 
if is_metaclass or functype == 'classmethod': yield cls return if functype == 'method': yield bases.Instance(self.parent.parent.frame()) return if context and context.callcontext: call_site = arguments.CallSite(context.callcontext, context.extra_context) for value in call_site.infer_argument(self.parent, name, context): yield value return if name == self.vararg: vararg = nodes.const_factory(()) vararg.parent = self yield vararg return if name == self.kwarg: kwarg = nodes.const_factory({}) kwarg.parent = self yield kwarg return # if there is a default value, yield it. And then yield Uninferable to reflect # we can't guess given argument value try: context = contextmod.copy_context(context) yield from self.default_value(name).infer(context) yield util.Uninferable except exceptions.NoDefault: yield util.Uninferable def arguments_assigned_stmts(self, node=None, context=None, asspath=None): if context.callcontext: # reset call context/name callcontext = context.callcontext context = contextmod.copy_context(context) context.callcontext = None args = arguments.CallSite(callcontext) return args.infer_argument(self.parent, node.name, context) return _arguments_infer_argname(self, node.name, context) nodes.Arguments.assigned_stmts = arguments_assigned_stmts @decorators.raise_if_nothing_inferred def assign_assigned_stmts(self, node=None, context=None, asspath=None): if not asspath: yield self.value return None yield from _resolve_asspart(self.value.infer(context), asspath, context) # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. return dict(node=self, unknown=node, assign_path=asspath, context=context) def assign_annassigned_stmts(self, node=None, context=None, asspath=None): for inferred in assign_assigned_stmts(self, node, context, asspath): if inferred is None: yield util.Uninferable else: yield inferred nodes.Assign.assigned_stmts = assign_assigned_stmts nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts nodes.AugAssign.assigned_stmts = assign_assigned_stmts def _resolve_asspart(parts, asspath, context): """recursive function to resolve multiple assignments""" asspath = asspath[:] index = asspath.pop(0) for part in parts: assigned = None if isinstance(part, nodes.Dict): # A dictionary in an iterating context try: assigned, _ = part.items[index] except IndexError: return elif hasattr(part, 'getitem'): index_node = nodes.Const(index) try: assigned = part.getitem(index_node, context) # XXX raise a specific exception to avoid potential hiding of # unexpected exception ? except (exceptions.AstroidTypeError, exceptions.AstroidIndexError): return if not assigned: return if not asspath: # we achieved to resolved the assignment path, don't infer the # last part yield assigned elif assigned is util.Uninferable: return else: # we are not yet on the last part of the path search on each # possibly inferred value try: yield from _resolve_asspart(assigned.infer(context), asspath, context) except exceptions.InferenceError: return @decorators.raise_if_nothing_inferred def excepthandler_assigned_stmts(self, node=None, context=None, asspath=None): for assigned in node_classes.unpack_infer(self.type): if isinstance(assigned, nodes.ClassDef): assigned = objects.ExceptionInstance(assigned) yield assigned # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. 
return dict(node=self, unknown=node, assign_path=asspath, context=context) nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts def _infer_context_manager(self, mgr, context): try: inferred = next(mgr.infer(context=context)) except (StopIteration, exceptions.InferenceError): return if isinstance(inferred, bases.Generator): # Check if it is decorated with contextlib.contextmanager. func = inferred.parent if not func.decorators: return for decorator_node in func.decorators.nodes: try: decorator = next(decorator_node.infer(context)) except StopIteration: return if isinstance(decorator, nodes.FunctionDef): if decorator.qname() == _CONTEXTLIB_MGR: break else: # It doesn't interest us. return # Get the first yield point. If it has multiple yields, # then a RuntimeError will be raised. possible_yield_points = func.nodes_of_class(nodes.Yield) # Ignore yields in nested functions yield_point = next((node for node in possible_yield_points if node.scope() == func), None) if yield_point: if not yield_point.value: const = nodes.Const(None) const.parent = yield_point const.lineno = yield_point.lineno yield const else: yield from yield_point.value.infer(context=context) elif isinstance(inferred, bases.Instance): try: enter = next(inferred.igetattr('__enter__', context=context)) except (StopIteration, exceptions.InferenceError, exceptions.AttributeInferenceError): return if not isinstance(enter, bases.BoundMethod): return if not context.callcontext: context.callcontext = contextmod.CallContext(args=[inferred]) for result in enter.infer_call_result(self, context): yield result @decorators.raise_if_nothing_inferred def with_assigned_stmts(self, node=None, context=None, asspath=None): """Infer names and other nodes from a *with* statement. This enables only inference for name binding in a *with* statement. For instance, in the following code, inferring `func` will return the `ContextManager` class, not whatever ``__enter__`` returns. We are doing this intentionally, because we consider that the context manager result is whatever __enter__ returns and what it is binded using the ``as`` keyword. class ContextManager(object): def __enter__(self): return 42 with ContextManager() as f: pass # ContextManager().infer() will return ContextManager # f.infer() will return 42. Arguments: self: nodes.With node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`. context: TODO asspath: TODO """ try: mgr = next(mgr for (mgr, vars) in self.items if vars == node) except StopIteration: return None if asspath is None: for result in _infer_context_manager(self, mgr, context): yield result else: for result in _infer_context_manager(self, mgr, context): # Walk the asspath and get the item at the final index. obj = result for index in asspath: if not hasattr(obj, 'elts'): raise exceptions.InferenceError( 'Wrong type ({targets!r}) for {node!r} assignment', node=self, targets=node, assign_path=asspath, context=context) try: obj = obj.elts[index] except IndexError as exc: raise exceptions.InferenceError( 'Tried to infer a nonexistent target with index {index} ' 'in {node!r}.', node=self, targets=node, assign_path=asspath, context=context) from exc except TypeError as exc: raise exceptions.InferenceError( 'Tried to unpack an non-iterable value ' 'in {node!r}.', node=self, targets=node, assign_path=asspath, context=context) from exc yield obj # Explicit StopIteration to return error information, see comment # in raise_if_nothing_inferred. 
return dict(node=self, unknown=node, assign_path=asspath, context=context) nodes.With.assigned_stmts = with_assigned_stmts @decorators.yes_if_nothing_inferred def starred_assigned_stmts(self, node=None, context=None, asspath=None): """ Arguments: self: nodes.Starred node: TODO context: TODO asspath: TODO """ # pylint: disable=too-many-locals,too-many-branches,too-many-statements def _determine_starred_iteration_lookups(starred, target, lookups): # Determine the lookups for the rhs of the iteration itered = target.itered() for index, element in enumerate(itered): if isinstance(element, nodes.Starred) and element.value.name == starred.value.name: lookups.append((index, len(itered))) break if isinstance(element, nodes.Tuple): lookups.append((index, len(element.itered()))) _determine_starred_iteration_lookups(starred, element, lookups) stmt = self.statement() if not isinstance(stmt, (nodes.Assign, nodes.For)): raise exceptions.InferenceError('Statement {stmt!r} enclosing {node!r} ' 'must be an Assign or For node.', node=self, stmt=stmt, unknown=node, context=context) if context is None: context = contextmod.InferenceContext() if isinstance(stmt, nodes.Assign): value = stmt.value lhs = stmt.targets[0] if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1: raise exceptions.InferenceError('Too many starred arguments in the ' ' assignment targets {lhs!r}.', node=self, targets=lhs, unknown=node, context=context) try: rhs = next(value.infer(context)) except exceptions.InferenceError: yield util.Uninferable return if rhs is util.Uninferable or not hasattr(rhs, 'itered'): yield util.Uninferable return try: elts = collections.deque(rhs.itered()) except TypeError: yield util.Uninferable return # Unpack iteratively the values from the rhs of the assignment, # until the find the starred node. What will remain will # be the list of values which the Starred node will represent # This is done in two steps, from left to right to remove # anything before the starred node and from right to left # to remove anything after the starred node. 
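# Added worked example (not in the original source): for
# `a, *b, c = [1, 2, 3, 4]`, the left-to-right pass below pops the value
# matched by `a`, the right-to-left pass pops the value matched by `c`,
# and the List yielded for `*b` ends up holding the Const nodes for 2 and 3.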
for index, left_node in enumerate(lhs.elts): if not isinstance(left_node, nodes.Starred): if not elts: break elts.popleft() continue lhs_elts = collections.deque(reversed(lhs.elts[index:])) for right_node in lhs_elts: if not isinstance(right_node, nodes.Starred): if not elts: break elts.pop() continue # We're done packed = nodes.List( ctx=Store, parent=self, lineno=lhs.lineno, col_offset=lhs.col_offset, ) packed.postinit(elts=elts) yield packed break if isinstance(stmt, nodes.For): try: inferred_iterable = next(stmt.iter.infer(context=context)) except exceptions.InferenceError: yield util.Uninferable return if inferred_iterable is util.Uninferable or not hasattr(inferred_iterable, 'itered'): yield util.Uninferable return try: itered = inferred_iterable.itered() except TypeError: yield util.Uninferable return target = stmt.target if not isinstance(target, nodes.Tuple): raise exceptions.InferenceError( 'Could not make sense of this, the target must be a tuple', context=context, ) lookups = [] _determine_starred_iteration_lookups(self, target, lookups) if not lookups: raise exceptions.InferenceError( 'Could not make sense of this, needs at least a lookup', context=context, ) # Make the last lookup a slice, since that what we want for a Starred node last_element_index, last_element_length = lookups[-1] is_starred_last = last_element_index == (last_element_length - 1) lookup_slice = slice( last_element_index, None if is_starred_last else (last_element_length - last_element_index) ) lookups[-1] = lookup_slice for element in itered: # We probably want to infer the potential values *for each* element in an # iterable, but we can't infer a list of all values, when only a list of # step values are expected: # # for a, *b in [...]: # b # # *b* should now point to just the elements at that particular iteration step, # which astroid can't know about. found_element = None for lookup in lookups: if not hasattr(element, 'itered'): break if not isinstance(lookup, slice): # Grab just the index, not the whole length lookup = lookup[0] try: itered_inner_element = element.itered() element = itered_inner_element[lookup] except IndexError: break except TypeError: # Most likely the itered() call failed, cannot make sense of this yield util.Uninferable return else: found_element = element unpacked = nodes.List( ctx=Store, parent=self, lineno=self.lineno, col_offset=self.col_offset, ) unpacked.postinit(elts=found_element or []) yield unpacked return yield util.Uninferable nodes.Starred.assigned_stmts = starred_assigned_stmts astroid-2.0.1/astroid/raw_building.py0000644000076500000240000003673113324063433020430 0ustar claudiustaff00000000000000# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2012 FELD Boris # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. 
# Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2015 Ovidiu Sabou # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """this module contains a set of functions to create astroid trees from scratch (build_* functions) or from living object (object_build_* functions) """ import builtins import inspect import logging import os import sys import types from astroid import bases from astroid import manager from astroid import node_classes from astroid import nodes MANAGER = manager.AstroidManager() # the keys of CONST_CLS eg python builtin types _CONSTANTS = tuple(node_classes.CONST_CLS) _BUILTINS = vars(builtins) _LOG = logging.getLogger(__name__) def _io_discrepancy(member): # _io module names itself `io`: http://bugs.python.org/issue18602 member_self = getattr(member, '__self__', None) return (member_self and inspect.ismodule(member_self) and member_self.__name__ == '_io' and member.__module__ == 'io') def _attach_local_node(parent, node, name): node.name = name # needed by add_local_node parent.add_local_node(node) def _add_dunder_class(func, member): """Add a __class__ member to the given func node, if we can determine it.""" python_cls = member.__class__ cls_name = getattr(python_cls, '__name__', None) if not cls_name: return cls_bases = [ancestor.__name__ for ancestor in python_cls.__bases__] ast_klass = build_class(cls_name, cls_bases, python_cls.__doc__) func.instance_attrs['__class__'] = [ast_klass] _marker = object() def attach_dummy_node(node, name, runtime_object=_marker): """create a dummy node and register it in the locals of the given node with the specified name """ enode = nodes.EmptyNode() enode.object = runtime_object _attach_local_node(node, enode, name) def _has_underlying_object(self): return self.object is not None and self.object is not _marker nodes.EmptyNode.has_underlying_object = _has_underlying_object def attach_const_node(node, name, value): """create a Const node and register it in the locals of the given node with the specified name """ if name not in node.special_attributes: _attach_local_node(node, nodes.const_factory(value), name) def attach_import_node(node, modname, membername): """create a ImportFrom node and register it in the locals of the given node with the specified name """ from_node = nodes.ImportFrom(modname, [(membername, None)]) _attach_local_node(node, from_node, membername) def build_module(name, doc=None): """create and initialize a astroid Module node""" node = nodes.Module(name, doc, pure_python=False) node.package = False node.parent = None return node def build_class(name, basenames=(), doc=None): """create and initialize a astroid ClassDef node""" node = nodes.ClassDef(name, doc) for base in basenames: basenode = nodes.Name() basenode.name = base node.bases.append(basenode) basenode.parent = node return node def build_function(name, args=None, defaults=None, doc=None): """create and initialize a astroid FunctionDef node""" args, defaults = args or [], defaults or [] # first argument is now a list of decorators func = nodes.FunctionDef(name, doc) func.args = argsnode = nodes.Arguments() argsnode.args = [] for arg in args: argsnode.args.append(nodes.Name()) argsnode.args[-1].name = arg argsnode.args[-1].parent = argsnode argsnode.defaults = [] for default in 
defaults: argsnode.defaults.append(nodes.const_factory(default)) argsnode.defaults[-1].parent = argsnode argsnode.kwarg = None argsnode.vararg = None argsnode.parent = func if args: register_arguments(func) return func def build_from_import(fromname, names): """create and initialize an astroid ImportFrom import statement""" return nodes.ImportFrom(fromname, [(name, None) for name in names]) def register_arguments(func, args=None): """add given arguments to local args is a list that may contains nested lists (i.e. def func(a, (b, c, d)): ...) """ if args is None: args = func.args.args if func.args.vararg: func.set_local(func.args.vararg, func.args) if func.args.kwarg: func.set_local(func.args.kwarg, func.args) for arg in args: if isinstance(arg, nodes.Name): func.set_local(arg.name, arg) else: register_arguments(func, arg.elts) def object_build_class(node, member, localname): """create astroid for a living class object""" basenames = [base.__name__ for base in member.__bases__] return _base_class_object_build(node, member, basenames, localname=localname) def object_build_function(node, member, localname): """create astroid for a living function object""" # pylint: disable=deprecated-method; completely removed in 2.0 args, varargs, varkw, defaults = inspect.getargspec(member) if varargs is not None: args.append(varargs) if varkw is not None: args.append(varkw) func = build_function(getattr(member, '__name__', None) or localname, args, defaults, member.__doc__) node.add_local_node(func, localname) def object_build_datadescriptor(node, member, name): """create astroid for a living data descriptor object""" return _base_class_object_build(node, member, [], name) def object_build_methoddescriptor(node, member, localname): """create astroid for a living method descriptor object""" # FIXME get arguments ? func = build_function(getattr(member, '__name__', None) or localname, doc=member.__doc__) # set node's arguments to None to notice that we have no information, not # and empty argument list func.args.args = None node.add_local_node(func, localname) _add_dunder_class(func, member) def _base_class_object_build(node, member, basenames, name=None, localname=None): """create astroid for a living class object, with a given set of base names (e.g. ancestors) """ klass = build_class(name or getattr(member, '__name__', None) or localname, basenames, member.__doc__) klass._newstyle = isinstance(member, type) node.add_local_node(klass, localname) try: # limit the instantiation trick since it's too dangerous # (such as infinite test execution...) # this at least resolves common case such as Exception.args, # OSError.errno if issubclass(member, Exception): instdict = member().__dict__ else: raise TypeError except: # pylint: disable=bare-except pass else: for item_name, obj in instdict.items(): valnode = nodes.EmptyNode() valnode.object = obj valnode.parent = klass valnode.lineno = 1 klass.instance_attrs[item_name] = [valnode] return klass def _build_from_function(node, name, member, module): # verify this is not an imported function try: code = member.__code__ except AttributeError: # Some implementations don't provide the code object, # such as Jython. 
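# ---- Editor's note: illustrative sketch, not part of raw_building.py ----
# The build_module/build_class/build_function helpers above create astroid
# nodes without any source text.  A hedged usage sketch assuming the installed
# ``astroid`` package; the names 'fake_module', 'FakeClass' and 'fake_function'
# are made up for illustration only.
from astroid import raw_building

module = raw_building.build_module('fake_module')
klass = raw_building.build_class('FakeClass', basenames=('object',),
                                 doc='a synthetic class')
module.add_local_node(klass, 'FakeClass')
func = raw_building.build_function('fake_function', args=['arg'],
                                   defaults=[None], doc='a synthetic function')
module.add_local_node(func, 'fake_function')

print(list(module.locals))  # expected: ['FakeClass', 'fake_function']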
code = None filename = getattr(code, 'co_filename', None) if filename is None: assert isinstance(member, object) object_build_methoddescriptor(node, member, name) elif filename != getattr(module, '__file__', None): attach_dummy_node(node, name, member) else: object_build_function(node, member, name) class InspectBuilder: """class for building nodes from living object this is actually a really minimal representation, including only Module, FunctionDef and ClassDef nodes and some others as guessed. """ # astroid from living objects ############################################### def __init__(self): self._done = {} self._module = None def inspect_build(self, module, modname=None, path=None): """build astroid from a living module (i.e. using inspect) this is used when there is no python source code available (either because it's a built-in module or because the .py is not available) """ self._module = module if modname is None: modname = module.__name__ try: node = build_module(modname, module.__doc__) except AttributeError: # in jython, java modules have no __doc__ (see #109562) node = build_module(modname) node.file = node.path = os.path.abspath(path) if path else path node.name = modname MANAGER.cache_module(node) node.package = hasattr(module, '__path__') self._done = {} self.object_build(node, module) return node def object_build(self, node, obj): """recursive method which create a partial ast from real objects (only function, class, and method are handled) """ if obj in self._done: return self._done[obj] self._done[obj] = node for name in dir(obj): try: member = getattr(obj, name) except AttributeError: # damned ExtensionClass.Base, I know you're there ! attach_dummy_node(node, name) continue if inspect.ismethod(member): member = member.__func__ if inspect.isfunction(member): _build_from_function(node, name, member, self._module) elif inspect.isbuiltin(member): if (not _io_discrepancy(member) and self.imported_member(node, member, name)): continue object_build_methoddescriptor(node, member, name) elif inspect.isclass(member): if self.imported_member(node, member, name): continue if member in self._done: class_node = self._done[member] if class_node not in node.locals.get(name, ()): node.add_local_node(class_node, name) else: class_node = object_build_class(node, member, name) # recursion self.object_build(class_node, member) if name == '__class__' and class_node.parent is None: class_node.parent = self._done[self._module] elif inspect.ismethoddescriptor(member): assert isinstance(member, object) object_build_methoddescriptor(node, member, name) elif inspect.isdatadescriptor(member): assert isinstance(member, object) object_build_datadescriptor(node, member, name) elif isinstance(member, _CONSTANTS): attach_const_node(node, name, member) elif inspect.isroutine(member): # This should be called for Jython, where some builtin # methods aren't caught by isbuiltin branch. _build_from_function(node, name, member, self._module) else: # create an empty node so that the name is actually defined attach_dummy_node(node, name, member) return None def imported_member(self, node, member, name): """verify this is not an imported class or handle it""" # /!\ some classes like ExtensionClass doesn't have a __module__ # attribute ! 
Also, this may trigger an exception on badly built module # (see http://www.logilab.org/ticket/57299 for instance) try: modname = getattr(member, '__module__', None) except: # pylint: disable=bare-except _LOG.exception('unexpected error while building ' 'astroid from living object') modname = None if modname is None: if name in ('__new__', '__subclasshook__'): # Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14) # >>> print object.__new__.__module__ # None modname = builtins.__name__ else: attach_dummy_node(node, name, member) return True real_name = { 'gtk': 'gtk_gtk', '_io': 'io', }.get(modname, modname) if real_name != self._module.__name__: # check if it sounds valid and then add an import node, else use a # dummy node try: getattr(sys.modules[modname], name) except (KeyError, AttributeError): attach_dummy_node(node, name, member) else: attach_import_node(node, modname, name) return True return False ### astroid bootstrapping ###################################################### Astroid_BUILDER = InspectBuilder() _CONST_PROXY = {} def _astroid_bootstrapping(astroid_builtin=None): """astroid boot strapping the builtins module""" # this boot strapping is necessary since we need the Const nodes to # inspect_build builtins, and then we can proxy Const if astroid_builtin is None: astroid_builtin = Astroid_BUILDER.inspect_build(builtins) # pylint: disable=redefined-outer-name for cls, node_cls in node_classes.CONST_CLS.items(): if cls is type(None): proxy = build_class('NoneType') proxy.parent = astroid_builtin elif cls is type(NotImplemented): proxy = build_class('NotImplementedType') proxy.parent = astroid_builtin else: proxy = astroid_builtin.getattr(cls.__name__)[0] if cls in (dict, list, set, tuple): node_cls._proxied = proxy else: _CONST_PROXY[cls] = proxy _astroid_bootstrapping() # TODO : find a nicer way to handle this situation; def _set_proxied(const): return _CONST_PROXY[const.value.__class__] nodes.Const._proxied = property(_set_proxied) _GeneratorType = nodes.ClassDef(types.GeneratorType.__name__, types.GeneratorType.__doc__) _GeneratorType.parent = MANAGER.astroid_cache[builtins.__name__] bases.Generator._proxied = _GeneratorType Astroid_BUILDER.object_build(bases.Generator._proxied, types.GeneratorType) _builtins = MANAGER.astroid_cache[builtins.__name__] BUILTIN_TYPES = (types.GetSetDescriptorType, types.GeneratorType, types.MemberDescriptorType, type(None), type(NotImplemented), types.FunctionType, types.MethodType, types.BuiltinFunctionType, types.ModuleType, types.TracebackType) for _type in BUILTIN_TYPES: if _type.__name__ not in _builtins: cls = nodes.ClassDef(_type.__name__, _type.__doc__) cls.parent = MANAGER.astroid_cache[builtins.__name__] Astroid_BUILDER.object_build(cls, _type) _builtins[_type.__name__] = cls astroid-2.0.1/astroid/rebuilder.py0000644000076500000240000012113013324063433017723 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2009-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2013-2018 Claudiu Popa # Copyright (c) 2013-2014 Google, Inc. 
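# ---- Editor's note: illustrative sketch, not part of rebuilder.py ----
# InspectBuilder (defined in raw_building.py above) is the fallback used when
# no Python source is available, e.g. for built-in or C extension modules.
# A hedged sketch assuming the installed ``astroid`` package and a CPython
# ``time`` module that has no .py source.
import time

from astroid.raw_building import InspectBuilder

builder = InspectBuilder()
time_astroid = builder.inspect_build(time)

print(time_astroid.name)               # 'time'
print('sleep' in time_astroid.locals)  # should be True: built via inspect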
# Copyright (c) 2014 Alexander Presnyakov # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016-2017 Derek Gustafson # Copyright (c) 2016 Jared Garst # Copyright (c) 2017 Hugo # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017 rr- # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """this module contains utilities for rebuilding a _ast tree in order to get a single Astroid representation """ import sys import astroid from astroid._ast import _parse, _get_parser_module, parse_function_type_comment from astroid import nodes CONST_NAME_TRANSFORMS = {'None': None, 'True': True, 'False': False, } REDIRECT = {'arguments': 'Arguments', 'comprehension': 'Comprehension', "ListCompFor": 'Comprehension', "GenExprFor": 'Comprehension', 'excepthandler': 'ExceptHandler', 'keyword': 'Keyword', } PY3 = sys.version_info >= (3, 0) PY34 = sys.version_info >= (3, 4) PY37 = sys.version_info >= (3, 7) def _binary_operators_from_module(module): binary_operators = { module.Add: '+', module.BitAnd: '&', module.BitOr: '|', module.BitXor: '^', module.Div: '/', module.FloorDiv: '//', module.Mod: '%', module.Mult: '*', module.Pow: '**', module.Sub: '-', module.LShift: '<<', module.RShift: '>>', } if sys.version_info >= (3, 5): binary_operators[module.MatMult] = '@' return binary_operators def _bool_operators_from_module(module): return { module.And: 'and', module.Or: 'or', } def _unary_operators_from_module(module): return { module.UAdd: '+', module.USub: '-', module.Not: 'not', module.Invert: '~', } def _compare_operators_from_module(module): return { module.Eq: '==', module.Gt: '>', module.GtE: '>=', module.In: 'in', module.Is: 'is', module.IsNot: 'is not', module.Lt: '<', module.LtE: '<=', module.NotEq: '!=', module.NotIn: 'not in', } def _contexts_from_module(module): return { module.Load: astroid.Load, module.Store: astroid.Store, module.Del: astroid.Del, module.Param: astroid.Store, } def _visit_or_none(node, attr, visitor, parent, visit='visit', **kws): """If the given node has an attribute, visits the attribute, and otherwise returns None. 
""" value = getattr(node, attr, None) if value: return getattr(visitor, visit)(value, parent, **kws) return None class TreeRebuilder: """Rebuilds the _ast tree to become an Astroid tree""" def __init__(self, manager, parse_python_two: bool = False): self._manager = manager self._global_names = [] self._import_from_nodes = [] self._delayed_assattr = [] self._visit_meths = {} # Configure the right classes for the right module self._parser_module = _get_parser_module(parse_python_two=parse_python_two) self._unary_op_classes = _unary_operators_from_module(self._parser_module) self._cmp_op_classes = _compare_operators_from_module(self._parser_module) self._bool_op_classes = _bool_operators_from_module(self._parser_module) self._bin_op_classes = _binary_operators_from_module(self._parser_module) self._context_classes = _contexts_from_module(self._parser_module) def _get_doc(self, node): try: if PY37 and hasattr(node, 'docstring'): doc = node.docstring return node, doc if (node.body and isinstance(node.body[0], self._parser_module.Expr) and isinstance(node.body[0].value, self._parser_module.Str)): doc = node.body[0].value.s node.body = node.body[1:] return node, doc except IndexError: pass # ast built from scratch return node, None def _get_context(self, node): return self._context_classes.get(type(node.ctx), astroid.Load) def visit_module(self, node, modname, modpath, package): """visit a Module node by returning a fresh instance of it""" node, doc = self._get_doc(node) newnode = nodes.Module(name=modname, doc=doc, file=modpath, path=[modpath], package=package, parent=None) newnode.postinit([self.visit(child, newnode) for child in node.body]) return newnode def visit(self, node, parent): cls = node.__class__ if cls in self._visit_meths: visit_method = self._visit_meths[cls] else: cls_name = cls.__name__ visit_name = 'visit_' + REDIRECT.get(cls_name, cls_name).lower() visit_method = getattr(self, visit_name) self._visit_meths[cls] = visit_method return visit_method(node, parent) def _save_assignment(self, node, name=None): """save assignement situation since node.parent is not available yet""" if self._global_names and node.name in self._global_names[-1]: node.root().set_local(node.name, node) else: node.parent.set_local(node.name, node) def visit_arguments(self, node, parent): """visit a Arguments node by returning a fresh instance of it""" vararg, kwarg = node.vararg, node.kwarg if PY34: newnode = nodes.Arguments(vararg.arg if vararg else None, kwarg.arg if kwarg else None, parent) else: newnode = nodes.Arguments(vararg, kwarg, parent) args = [self.visit(child, newnode) for child in node.args] defaults = [self.visit(child, newnode) for child in node.defaults] varargannotation = None kwargannotation = None # change added in 82732 (7c5c678e4164), vararg and kwarg # are instances of `_ast.arg`, not strings if vararg: if PY34: if node.vararg.annotation: varargannotation = self.visit(node.vararg.annotation, newnode) vararg = vararg.arg if kwarg: if PY34: if node.kwarg.annotation: kwargannotation = self.visit(node.kwarg.annotation, newnode) kwarg = kwarg.arg if PY3: kwonlyargs = [self.visit(child, newnode) for child in node.kwonlyargs] kw_defaults = [self.visit(child, newnode) if child else None for child in node.kw_defaults] annotations = [self.visit(arg.annotation, newnode) if arg.annotation else None for arg in node.args] kwonlyargs_annotations = [ self.visit(arg.annotation, newnode) if arg.annotation else None for arg in node.kwonlyargs ] else: kwonlyargs = [] kw_defaults = [] annotations = [] 
kwonlyargs_annotations = [] newnode.postinit( args=args, defaults=defaults, kwonlyargs=kwonlyargs, kw_defaults=kw_defaults, annotations=annotations, kwonlyargs_annotations=kwonlyargs_annotations, varargannotation=varargannotation, kwargannotation=kwargannotation ) # save argument names in locals: if vararg: newnode.parent.set_local(vararg, newnode) if kwarg: newnode.parent.set_local(kwarg, newnode) return newnode def visit_assert(self, node, parent): """visit a Assert node by returning a fresh instance of it""" newnode = nodes.Assert(node.lineno, node.col_offset, parent) if node.msg: msg = self.visit(node.msg, newnode) else: msg = None newnode.postinit(self.visit(node.test, newnode), msg) return newnode def check_type_comment(self, node): type_comment = getattr(node, 'type_comment', None) if not type_comment: return None try: type_comment_ast = _parse(type_comment) except SyntaxError: # Invalid type comment, just skip it. return None type_object = self.visit(type_comment_ast.body[0], node) if not isinstance(type_object, nodes.Expr): return None return type_object.value def check_function_type_comment(self, node): type_comment = getattr(node, 'type_comment', None) if not type_comment: return None try: type_comment_ast = parse_function_type_comment(type_comment) except SyntaxError: # Invalid type comment, just skip it. return None returns = None argtypes = [self.visit(elem, node) for elem in (type_comment_ast.argtypes or [])] if type_comment_ast.returns: returns = self.visit(type_comment_ast.returns, node) return returns, argtypes def visit_assign(self, node, parent): """visit a Assign node by returning a fresh instance of it""" type_annotation = self.check_type_comment(node) newnode = nodes.Assign(node.lineno, node.col_offset, parent) newnode.postinit( targets=[self.visit(child, newnode) for child in node.targets], value=self.visit(node.value, newnode), type_annotation=type_annotation, ) return newnode def visit_assignname(self, node, parent, node_name=None): '''visit a node and return a AssignName node''' newnode = nodes.AssignName(node_name, getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) self._save_assignment(newnode) return newnode def visit_augassign(self, node, parent): """visit a AugAssign node by returning a fresh instance of it""" newnode = nodes.AugAssign(self._bin_op_classes[type(node.op)] + "=", node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.target, newnode), self.visit(node.value, newnode)) return newnode def visit_repr(self, node, parent): """visit a Backquote node by returning a fresh instance of it""" newnode = nodes.Repr(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_binop(self, node, parent): """visit a BinOp node by returning a fresh instance of it""" newnode = nodes.BinOp(self._bin_op_classes[type(node.op)], node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.left, newnode), self.visit(node.right, newnode)) return newnode def visit_boolop(self, node, parent): """visit a BoolOp node by returning a fresh instance of it""" newnode = nodes.BoolOp(self._bool_op_classes[type(node.op)], node.lineno, node.col_offset, parent) newnode.postinit([self.visit(child, newnode) for child in node.values]) return newnode def visit_break(self, node, parent): """visit a Break node by returning a fresh instance of it""" return nodes.Break(getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) def visit_call(self, node, parent): """visit a 
CallFunc node by returning a fresh instance of it""" newnode = nodes.Call(node.lineno, node.col_offset, parent) starargs = _visit_or_none(node, 'starargs', self, newnode) kwargs = _visit_or_none(node, 'kwargs', self, newnode) args = [self.visit(child, newnode) for child in node.args] if node.keywords: keywords = [self.visit(child, newnode) for child in node.keywords] else: keywords = None if starargs: new_starargs = nodes.Starred(col_offset=starargs.col_offset, lineno=starargs.lineno, parent=starargs.parent) new_starargs.postinit(value=starargs) args.append(new_starargs) if kwargs: new_kwargs = nodes.Keyword(arg=None, col_offset=kwargs.col_offset, lineno=kwargs.lineno, parent=kwargs.parent) new_kwargs.postinit(value=kwargs) if keywords: keywords.append(new_kwargs) else: keywords = [new_kwargs] newnode.postinit(self.visit(node.func, newnode), args, keywords) return newnode def visit_classdef(self, node, parent, newstyle=None): """visit a ClassDef node to become astroid""" node, doc = self._get_doc(node) newnode = nodes.ClassDef(node.name, doc, node.lineno, node.col_offset, parent) metaclass = None if PY3: for keyword in node.keywords: if keyword.arg == 'metaclass': metaclass = self.visit(keyword, newnode).value break if node.decorator_list: decorators = self.visit_decorators(node, newnode) else: decorators = None newnode.postinit([self.visit(child, newnode) for child in node.bases], [self.visit(child, newnode) for child in node.body], decorators, newstyle, metaclass, [self.visit(kwd, newnode) for kwd in node.keywords if kwd.arg != 'metaclass'] if PY3 else []) return newnode def visit_const(self, node, parent): """visit a Const node by returning a fresh instance of it""" return nodes.Const(node.value, getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) def visit_continue(self, node, parent): """visit a Continue node by returning a fresh instance of it""" return nodes.Continue(getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) def visit_compare(self, node, parent): """visit a Compare node by returning a fresh instance of it""" newnode = nodes.Compare(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.left, newnode), [(self._cmp_op_classes[op.__class__], self.visit(expr, newnode)) for (op, expr) in zip(node.ops, node.comparators)]) return newnode def visit_comprehension(self, node, parent): """visit a Comprehension node by returning a fresh instance of it""" newnode = nodes.Comprehension(parent) newnode.postinit(self.visit(node.target, newnode), self.visit(node.iter, newnode), [self.visit(child, newnode) for child in node.ifs], getattr(node, 'is_async', None)) return newnode def visit_decorators(self, node, parent): """visit a Decorators node by returning a fresh instance of it""" # /!\ node is actually a _ast.FunctionDef node while # parent is a astroid.nodes.FunctionDef node newnode = nodes.Decorators(node.lineno, node.col_offset, parent) newnode.postinit([self.visit(child, newnode) for child in node.decorator_list]) return newnode def visit_delete(self, node, parent): """visit a Delete node by returning a fresh instance of it""" newnode = nodes.Delete(node.lineno, node.col_offset, parent) newnode.postinit([self.visit(child, newnode) for child in node.targets]) return newnode def _visit_dict_items(self, node, parent, newnode): for key, value in zip(node.keys, node.values): rebuilt_value = self.visit(value, newnode) if not key: # Python 3.5 and extended unpacking rebuilt_key = nodes.DictUnpack(rebuilt_value.lineno, 
rebuilt_value.col_offset, parent) else: rebuilt_key = self.visit(key, newnode) yield rebuilt_key, rebuilt_value def visit_dict(self, node, parent): """visit a Dict node by returning a fresh instance of it""" newnode = nodes.Dict(node.lineno, node.col_offset, parent) items = list(self._visit_dict_items(node, parent, newnode)) newnode.postinit(items) return newnode def visit_dictcomp(self, node, parent): """visit a DictComp node by returning a fresh instance of it""" newnode = nodes.DictComp(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.key, newnode), self.visit(node.value, newnode), [self.visit(child, newnode) for child in node.generators]) return newnode def visit_expr(self, node, parent): """visit a Expr node by returning a fresh instance of it""" newnode = nodes.Expr(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_ellipsis(self, node, parent): """visit an Ellipsis node by returning a fresh instance of it""" return nodes.Ellipsis(getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) def visit_emptynode(self, node, parent): """visit an EmptyNode node by returning a fresh instance of it""" return nodes.EmptyNode(getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) def visit_excepthandler(self, node, parent): """visit an ExceptHandler node by returning a fresh instance of it""" newnode = nodes.ExceptHandler(node.lineno, node.col_offset, parent) # /!\ node.name can be a tuple newnode.postinit(_visit_or_none(node, 'type', self, newnode), _visit_or_none(node, 'name', self, newnode), [self.visit(child, newnode) for child in node.body]) return newnode def visit_exec(self, node, parent): """visit an Exec node by returning a fresh instance of it""" newnode = nodes.Exec(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.body, newnode), _visit_or_none(node, 'globals', self, newnode), _visit_or_none(node, 'locals', self, newnode)) return newnode def visit_extslice(self, node, parent): """visit an ExtSlice node by returning a fresh instance of it""" newnode = nodes.ExtSlice(parent=parent) newnode.postinit([self.visit(dim, newnode) for dim in node.dims]) return newnode def _visit_for(self, cls, node, parent): """visit a For node by returning a fresh instance of it""" newnode = cls(node.lineno, node.col_offset, parent) type_annotation = self.check_type_comment(node) newnode.postinit( target=self.visit(node.target, newnode), iter=self.visit(node.iter, newnode), body=[self.visit(child, newnode) for child in node.body], orelse=[self.visit(child, newnode) for child in node.orelse], type_annotation=type_annotation, ) return newnode def visit_for(self, node, parent): return self._visit_for(nodes.For, node, parent) def visit_importfrom(self, node, parent): """visit an ImportFrom node by returning a fresh instance of it""" names = [(alias.name, alias.asname) for alias in node.names] newnode = nodes.ImportFrom(node.module or '', names, node.level or None, getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) # store From names to add them to locals after building self._import_from_nodes.append(newnode) return newnode def _visit_functiondef(self, cls, node, parent): """visit an FunctionDef node to become astroid""" self._global_names.append({}) node, doc = self._get_doc(node) newnode = cls(node.name, doc, node.lineno, node.col_offset, parent) if node.decorator_list: decorators = self.visit_decorators(node, newnode) else: decorators = None if PY3 
and node.returns: returns = self.visit(node.returns, newnode) else: returns = None type_comment_args = type_comment_returns = None type_comment_annotation = self.check_function_type_comment(node) if type_comment_annotation: type_comment_returns, type_comment_args = type_comment_annotation newnode.postinit( args=self.visit(node.args, newnode), body=[self.visit(child, newnode) for child in node.body], decorators=decorators, returns=returns, type_comment_returns=type_comment_returns, type_comment_args=type_comment_args, ) self._global_names.pop() return newnode def visit_functiondef(self, node, parent): return self._visit_functiondef(nodes.FunctionDef, node, parent) def visit_generatorexp(self, node, parent): """visit a GeneratorExp node by returning a fresh instance of it""" newnode = nodes.GeneratorExp(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.elt, newnode), [self.visit(child, newnode) for child in node.generators]) return newnode def visit_attribute(self, node, parent): """visit an Attribute node by returning a fresh instance of it""" context = self._get_context(node) if context == astroid.Del: # FIXME : maybe we should reintroduce and visit_delattr ? # for instance, deactivating assign_ctx newnode = nodes.DelAttr(node.attr, node.lineno, node.col_offset, parent) elif context == astroid.Store: newnode = nodes.AssignAttr(node.attr, node.lineno, node.col_offset, parent) # Prohibit a local save if we are in an ExceptHandler. if not isinstance(parent, astroid.ExceptHandler): self._delayed_assattr.append(newnode) else: newnode = nodes.Attribute(node.attr, node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_global(self, node, parent): """visit a Global node to become astroid""" newnode = nodes.Global(node.names, getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) if self._global_names: # global at the module level, no effect for name in node.names: self._global_names[-1].setdefault(name, []).append(newnode) return newnode def visit_if(self, node, parent): """visit an If node by returning a fresh instance of it""" newnode = nodes.If(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.test, newnode), [self.visit(child, newnode) for child in node.body], [self.visit(child, newnode) for child in node.orelse]) return newnode def visit_ifexp(self, node, parent): """visit a IfExp node by returning a fresh instance of it""" newnode = nodes.IfExp(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.test, newnode), self.visit(node.body, newnode), self.visit(node.orelse, newnode)) return newnode def visit_import(self, node, parent): """visit a Import node by returning a fresh instance of it""" names = [(alias.name, alias.asname) for alias in node.names] newnode = nodes.Import(names, getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) # save import names in parent's locals: for (name, asname) in newnode.names: name = asname or name parent.set_local(name.split('.')[0], newnode) return newnode def visit_index(self, node, parent): """visit a Index node by returning a fresh instance of it""" newnode = nodes.Index(parent=parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_keyword(self, node, parent): """visit a Keyword node by returning a fresh instance of it""" newnode = nodes.Keyword(node.arg, parent=parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_lambda(self, node, parent): 
"""visit a Lambda node by returning a fresh instance of it""" newnode = nodes.Lambda(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.args, newnode), self.visit(node.body, newnode)) return newnode def visit_list(self, node, parent): """visit a List node by returning a fresh instance of it""" context = self._get_context(node) newnode = nodes.List(ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent) newnode.postinit([self.visit(child, newnode) for child in node.elts]) return newnode def visit_listcomp(self, node, parent): """visit a ListComp node by returning a fresh instance of it""" newnode = nodes.ListComp(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.elt, newnode), [self.visit(child, newnode) for child in node.generators]) return newnode def visit_name(self, node, parent): """visit a Name node by returning a fresh instance of it""" context = self._get_context(node) # True and False can be assigned to something in py2x, so we have to # check first the context. if context == astroid.Del: newnode = nodes.DelName(node.id, node.lineno, node.col_offset, parent) elif context == astroid.Store: newnode = nodes.AssignName(node.id, node.lineno, node.col_offset, parent) elif node.id in CONST_NAME_TRANSFORMS: newnode = nodes.Const(CONST_NAME_TRANSFORMS[node.id], getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) return newnode else: newnode = nodes.Name(node.id, node.lineno, node.col_offset, parent) # XXX REMOVE me : if context in (astroid.Del, astroid.Store): # 'Aug' ?? self._save_assignment(newnode) return newnode def visit_str(self, node, parent): """visit a String/Bytes node by returning a fresh instance of Const""" return nodes.Const(node.s, getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) visit_bytes = visit_str def visit_num(self, node, parent): """visit a Num node by returning a fresh instance of Const""" return nodes.Const(node.n, getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) def visit_pass(self, node, parent): """visit a Pass node by returning a fresh instance of it""" return nodes.Pass(node.lineno, node.col_offset, parent) def visit_print(self, node, parent): """visit a Print node by returning a fresh instance of it""" newnode = nodes.Print(node.nl, node.lineno, node.col_offset, parent) newnode.postinit(_visit_or_none(node, 'dest', self, newnode), [self.visit(child, newnode) for child in node.values]) return newnode def visit_raise(self, node, parent): """visit a Raise node by returning a fresh instance of it""" newnode = nodes.Raise(node.lineno, node.col_offset, parent) # pylint: disable=too-many-function-args newnode.postinit(_visit_or_none(node, 'type', self, newnode), _visit_or_none(node, 'inst', self, newnode), _visit_or_none(node, 'tback', self, newnode)) return newnode def visit_return(self, node, parent): """visit a Return node by returning a fresh instance of it""" newnode = nodes.Return(node.lineno, node.col_offset, parent) if node.value is not None: newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_set(self, node, parent): """visit a Set node by returning a fresh instance of it""" newnode = nodes.Set(node.lineno, node.col_offset, parent) newnode.postinit([self.visit(child, newnode) for child in node.elts]) return newnode def visit_setcomp(self, node, parent): """visit a SetComp node by returning a fresh instance of it""" newnode = nodes.SetComp(node.lineno, node.col_offset, parent) 
newnode.postinit(self.visit(node.elt, newnode), [self.visit(child, newnode) for child in node.generators]) return newnode def visit_slice(self, node, parent): """visit a Slice node by returning a fresh instance of it""" newnode = nodes.Slice(parent=parent) newnode.postinit(_visit_or_none(node, 'lower', self, newnode), _visit_or_none(node, 'upper', self, newnode), _visit_or_none(node, 'step', self, newnode)) return newnode def visit_subscript(self, node, parent): """visit a Subscript node by returning a fresh instance of it""" context = self._get_context(node) newnode = nodes.Subscript(ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent) newnode.postinit(self.visit(node.value, newnode), self.visit(node.slice, newnode)) return newnode def visit_tryexcept(self, node, parent): """visit a TryExcept node by returning a fresh instance of it""" newnode = nodes.TryExcept(node.lineno, node.col_offset, parent) newnode.postinit([self.visit(child, newnode) for child in node.body], [self.visit(child, newnode) for child in node.handlers], [self.visit(child, newnode) for child in node.orelse]) return newnode def visit_tryfinally(self, node, parent): """visit a TryFinally node by returning a fresh instance of it""" newnode = nodes.TryFinally(node.lineno, node.col_offset, parent) newnode.postinit([self.visit(child, newnode) for child in node.body], [self.visit(n, newnode) for n in node.finalbody]) return newnode def visit_tuple(self, node, parent): """visit a Tuple node by returning a fresh instance of it""" context = self._get_context(node) newnode = nodes.Tuple(ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent) newnode.postinit([self.visit(child, newnode) for child in node.elts]) return newnode def visit_unaryop(self, node, parent): """visit a UnaryOp node by returning a fresh instance of it""" newnode = nodes.UnaryOp(self._unary_op_classes[node.op.__class__], node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.operand, newnode)) return newnode def visit_while(self, node, parent): """visit a While node by returning a fresh instance of it""" newnode = nodes.While(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.test, newnode), [self.visit(child, newnode) for child in node.body], [self.visit(child, newnode) for child in node.orelse]) return newnode def visit_with(self, node, parent): newnode = nodes.With(node.lineno, node.col_offset, parent) expr = self.visit(node.context_expr, newnode) if node.optional_vars is not None: optional_vars = self.visit(node.optional_vars, newnode) else: optional_vars = None type_annotation = self.check_type_comment(node) newnode.postinit( items=[(expr, optional_vars)], body=[self.visit(child, newnode) for child in node.body], type_annotation=type_annotation, ) return newnode def visit_yield(self, node, parent): """visit a Yield node by returning a fresh instance of it""" newnode = nodes.Yield(node.lineno, node.col_offset, parent) if node.value is not None: newnode.postinit(self.visit(node.value, newnode)) return newnode class TreeRebuilder3(TreeRebuilder): """extend and overwrite TreeRebuilder for python3k""" def visit_arg(self, node, parent): """visit a arg node by returning a fresh AssName instance""" return self.visit_assignname(node, parent, node.arg) def visit_nameconstant(self, node, parent): # in Python 3.4 we have NameConstant for True / False / None return nodes.Const(node.value, getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) def visit_excepthandler(self, 
node, parent): """visit an ExceptHandler node by returning a fresh instance of it""" newnode = nodes.ExceptHandler(node.lineno, node.col_offset, parent) if node.name: name = self.visit_assignname(node, newnode, node.name) else: name = None newnode.postinit(_visit_or_none(node, 'type', self, newnode), name, [self.visit(child, newnode) for child in node.body]) return newnode def visit_nonlocal(self, node, parent): """visit a Nonlocal node and return a new instance of it""" return nodes.Nonlocal(node.names, getattr(node, 'lineno', None), getattr(node, 'col_offset', None), parent) def visit_raise(self, node, parent): """visit a Raise node by returning a fresh instance of it""" newnode = nodes.Raise(node.lineno, node.col_offset, parent) # no traceback; anyway it is not used in Pylint newnode.postinit(_visit_or_none(node, 'exc', self, newnode), _visit_or_none(node, 'cause', self, newnode)) return newnode def visit_starred(self, node, parent): """visit a Starred node and return a new instance of it""" context = self._get_context(node) newnode = nodes.Starred(ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent) newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_try(self, node, parent): # python 3.3 introduce a new Try node replacing # TryFinally/TryExcept nodes if node.finalbody: newnode = nodes.TryFinally(node.lineno, node.col_offset, parent) if node.handlers: body = [self.visit_tryexcept(node, newnode)] else: body = [self.visit(child, newnode) for child in node.body] newnode.postinit(body, [self.visit(n, newnode) for n in node.finalbody]) return newnode elif node.handlers: return self.visit_tryexcept(node, parent) return None def visit_annassign(self, node, parent): """visit an AnnAssign node by returning a fresh instance of it""" newnode = nodes.AnnAssign(node.lineno, node.col_offset, parent) annotation = _visit_or_none(node, 'annotation', self, newnode) newnode.postinit(target=self.visit(node.target, newnode), annotation=annotation, simple=node.simple, value=_visit_or_none(node, 'value', self, newnode)) return newnode def _visit_with(self, cls, node, parent): if 'items' not in node._fields: # python < 3.3 return super(TreeRebuilder3, self).visit_with(node, parent) newnode = cls(node.lineno, node.col_offset, parent) def visit_child(child): expr = self.visit(child.context_expr, newnode) var = _visit_or_none(child, 'optional_vars', self, newnode) return expr, var type_annotation = self.check_type_comment(node) newnode.postinit( items=[visit_child(child) for child in node.items], body=[self.visit(child, newnode) for child in node.body], type_annotation=type_annotation, ) return newnode def visit_with(self, node, parent): return self._visit_with(nodes.With, node, parent) def visit_yieldfrom(self, node, parent): newnode = nodes.YieldFrom(node.lineno, node.col_offset, parent) if node.value is not None: newnode.postinit(self.visit(node.value, newnode)) return newnode def visit_classdef(self, node, parent, newstyle=True): return super(TreeRebuilder3, self).visit_classdef(node, parent, newstyle=newstyle) # Async structs added in Python 3.5 def visit_asyncfunctiondef(self, node, parent): return self._visit_functiondef(nodes.AsyncFunctionDef, node, parent) def visit_asyncfor(self, node, parent): return self._visit_for(nodes.AsyncFor, node, parent) def visit_await(self, node, parent): newnode = nodes.Await(node.lineno, node.col_offset, parent) newnode.postinit(value=self.visit(node.value, newnode)) return newnode def visit_asyncwith(self, node, parent): return 
self._visit_with(nodes.AsyncWith, node, parent) def visit_joinedstr(self, node, parent): newnode = nodes.JoinedStr(node.lineno, node.col_offset, parent) newnode.postinit([self.visit(child, newnode) for child in node.values]) return newnode def visit_formattedvalue(self, node, parent): newnode = nodes.FormattedValue(node.lineno, node.col_offset, parent) newnode.postinit(self.visit(node.value, newnode), node.conversion, _visit_or_none(node, 'format_spec', self, newnode)) return newnode if sys.version_info >= (3, 0): TreeRebuilder = TreeRebuilder3 astroid-2.0.1/astroid/scoped_nodes.py0000644000076500000240000026510613324063433020427 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2010 Daniel Harding # Copyright (c) 2011, 2013-2015 Google, Inc. # Copyright (c) 2013-2018 Claudiu Popa # Copyright (c) 2013 Phil Schaf # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Florian Bruhin # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Rene Zhang # Copyright (c) 2015 Philip Lorenz # Copyright (c) 2016-2017 Derek Gustafson # Copyright (c) 2017-2018 Bryce Guinta # Copyright (c) 2017-2018 Ashley Whetter # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017 David Euresti # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 Anthony Sottile # Copyright (c) 2018 HoverHell # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """ This module contains the classes for "scoped" node, i.e. which are opening a new local scope in the language definition : Module, ClassDef, FunctionDef (and Lambda, GeneratorExp, DictComp and SetComp to some extent). """ import builtins import sys import io import itertools from typing import Optional, List from astroid import bases from astroid import context as contextmod from astroid import exceptions from astroid import decorators as decorators_mod from astroid.interpreter import objectmodel from astroid.interpreter import dunder_lookup from astroid import manager from astroid import mixins from astroid import node_classes from astroid import util BUILTINS = builtins.__name__ ITER_METHODS = ('__iter__', '__getitem__') def _c3_merge(sequences, cls, context): """Merges MROs in *sequences* to a single MRO using the C3 algorithm. Adapted from http://www.python.org/download/releases/2.3/mro/. """ result = [] while True: sequences = [s for s in sequences if s] # purge empty sequences if not sequences: return result for s1 in sequences: # find merge candidates among seq heads candidate = s1[0] for s2 in sequences: if candidate in s2[1:]: candidate = None break # reject the current head, it appears later else: break if not candidate: # Show all the remaining bases, which were considered as # candidates for the next mro sequence. 
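# ---- Editor's note: illustrative sketch, not part of scoped_nodes.py ----
# The TreeRebuilder above converts the stdlib ``_ast`` tree into the scoped
# astroid nodes defined below (Module, ClassDef, FunctionDef, ...).  A hedged
# round-trip sketch assuming the installed ``astroid`` package.
import astroid

module = astroid.parse("""
class Greeter:
    def greet(self, name="world"):
        return "hello " + name
""")
klass = module.body[0]     # ClassDef, produced by visit_classdef
method = klass.body[0]     # FunctionDef, produced by _visit_functiondef
print(module.__class__.__name__, klass.name, method.name)
print([ancestor.name for ancestor in klass.ancestors()])  # expected: ['object']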
raise exceptions.InconsistentMroError( message="Cannot create a consistent method resolution order " "for MROs {mros} of class {cls!r}.", mros=sequences, cls=cls, context=context) result.append(candidate) # remove the chosen candidate for seq in sequences: if seq[0] == candidate: del seq[0] return None def _verify_duplicates_mro(sequences, cls, context): for sequence in sequences: names = [(node.lineno, node.qname()) for node in sequence if node.name] if len(names) != len(set(names)): raise exceptions.DuplicateBasesError( message='Duplicates found in MROs {mros} for {cls!r}.', mros=sequences, cls=cls, context=context) def function_to_method(n, klass): if isinstance(n, FunctionDef): if n.type == 'classmethod': return bases.BoundMethod(n, klass) if n.type != 'staticmethod': return bases.UnboundMethod(n) return n MANAGER = manager.AstroidManager() def builtin_lookup(name): """lookup a name into the builtin module return the list of matching statements and the astroid for the builtin module """ builtin_astroid = MANAGER.ast_from_module(builtins) if name == '__dict__': return builtin_astroid, () try: stmts = builtin_astroid.locals[name] except KeyError: stmts = () return builtin_astroid, stmts # TODO move this Mixin to mixins.py; problem: 'FunctionDef' in _scope_lookup class LocalsDictNodeNG(node_classes.LookupMixIn, node_classes.NodeNG): """ this class provides locals handling common to Module, FunctionDef and ClassDef nodes, including a dict like interface for direct access to locals information """ # attributes below are set by the builder module or by raw factories locals = {} """A map of the name of a local variable to the node defining the local. :type: dict(str, NodeNG) """ def qname(self): """Get the 'qualified' name of the node. For example: module.name, module.class.name ... :returns: The qualified name. :rtype: str """ # pylint: disable=no-member; github.com/pycqa/astroid/issues/278 if self.parent is None: return self.name return '%s.%s' % (self.parent.frame().qname(), self.name) def frame(self): """The first parent frame node. A frame node is a :class:`Module`, :class:`FunctionDef`, or :class:`ClassDef`. :returns: The first parent frame node. :rtype: Module or FunctionDef or ClassDef """ return self def scope(self): """The first parent node defining a new scope. :returns: The first parent scope node. :rtype: Module or FunctionDef or ClassDef or Lambda or GenExpr """ return self def _scope_lookup(self, node, name, offset=0): """XXX method for interfacing the scope lookup""" try: stmts = node._filter_stmts(self.locals[name], self, offset) except KeyError: stmts = () if stmts: return self, stmts if self.parent: # i.e. not Module # nested scope: if parent scope is a function, that's fine # else jump to the module pscope = self.parent.scope() if not pscope.is_function: pscope = pscope.root() return pscope.scope_lookup(node, name) return builtin_lookup(name) # Module def set_local(self, name, stmt): """Define that the given name is declared in the given statement node. .. seealso:: :meth:`scope` :param name: The name that is being defined. :type name: str :param stmt: The statement that defines the given name. :type stmt: NodeNG """ #assert not stmt in self.locals.get(name, ()), (self, stmt) self.locals.setdefault(name, []).append(stmt) __setitem__ = set_local def _append_node(self, child): """append a child, linking it in the tree""" # pylint: disable=no-member; depending by the class # which uses the current class as a mixin or base class. 
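# ---- Editor's note: illustrative sketch, not part of scoped_nodes.py ----
# builtin_lookup and _scope_lookup above implement the name resolution that
# backs inference of Name nodes.  A hedged sketch assuming the installed
# ``astroid`` package.
import astroid
from astroid.scoped_nodes import builtin_lookup

builtins_module, matches = builtin_lookup('len')
print(builtins_module.name)            # 'builtins'
print(matches[0].__class__.__name__)   # typically 'FunctionDef'

# A name that is not local to its function falls back to the builtins scope.
name_node = astroid.extract_node("""
def f():
    len  #@
""")
scope, assignments = name_node.scope().scope_lookup(name_node, 'len')
print(scope is builtins_module)        # should be True (same cached Module)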
# It's rewritten in 2.0, so it makes no sense for now # to spend development time on it. self.body.append(child) child.parent = self def add_local_node(self, child_node, name=None): """Append a child that should alter the locals of this scope node. :param child_node: The child node that will alter locals. :type child_node: NodeNG :param name: The name of the local that will be altered by the given child node. :type name: str or None """ if name != '__class__': # add __class__ node as a child will cause infinite recursion later! self._append_node(child_node) self.set_local(name or child_node.name, child_node) def __getitem__(self, item): """The first node the defines the given local. :param item: The name of the locally defined object. :type item: str :raises KeyError: If the name is not defined. """ return self.locals[item][0] def __iter__(self): """Iterate over the names of locals defined in this scoped node. :returns: The names of the defined locals. :rtype: iterable(str) """ return iter(self.keys()) def keys(self): """The names of locals defined in this scoped node. :returns: The names of the defined locals. :rtype: list(str) """ return list(self.locals.keys()) def values(self): """The nodes that define the locals in this scoped node. :returns: The nodes that define locals. :rtype: list(NodeNG) """ return [self[key] for key in self.keys()] def items(self): """Get the names of the locals and the node that defines the local. :returns: The names of locals and their asociated node. :rtype: list(tuple(str, NodeNG)) """ return list(zip(self.keys(), self.values())) def __contains__(self, name): """Check if a local is defined in this scope. :param name: The name of the local to check for. :type name: str :returns: True if this node has a local of the given name, False otherwise. :rtype: bool """ return name in self.locals class Module(LocalsDictNodeNG): """Class representing an :class:`ast.Module` node. >>> node = astroid.extract_node('import astroid') >>> node >>> node.parent """ _astroid_fields = ('body',) fromlineno = 0 """The first line that this node appears on in the source code. :type: int or None """ lineno = 0 """The line that this node appears on in the source code. :type: int or None """ # attributes below are set by the builder module or by raw factories file = None """The path to the file that this ast has been extracted from. This will be ``None`` when the representation has been built from a built-in module. :type: str or None """ file_bytes = None """The string/bytes that this ast was built from. :type: str or bytes or None """ file_encoding = None """The encoding of the source file. This is used to get unicode out of a source file. Python 2 only. :type: str or None """ name = None """The name of the module. :type: str or None """ pure_python = None """Whether the ast was built from source. :type: bool or None """ package = None """Whether the node represents a package or a module. :type: bool or None """ globals = None """A map of the name of a global variable to the node defining the global. :type: dict(str, NodeNG) """ # Future imports future_imports = None """The imports from ``__future__``. :type: set(str) or None """ special_attributes = objectmodel.ModuleModel() """The names of special attributes that this module has. :type: objectmodel.ModuleModel """ # names of module attributes available through the global scope scope_attrs = {'__name__', '__doc__', '__file__', '__path__', '__package__'} """The names of module attributes available through the global scope. 
:type: str(str) """ _other_fields = ('name', 'doc', 'file', 'path', 'package', 'pure_python', 'future_imports') _other_other_fields = ('locals', 'globals') def __init__(self, name, doc, file=None, path: Optional[List[str]] = None, package=None, parent=None, pure_python=True): """ :param name: The name of the module. :type name: str :param doc: The module docstring. :type doc: str :param file: The path to the file that this ast has been extracted from. :type file: str or None :param path: :type path: Optional[List[str]] :param package: Whether the node represents a package or a module. :type package: bool or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None :param pure_python: Whether the ast was built from source. :type pure_python: bool or None """ self.name = name self.doc = doc self.file = file self.path = path self.package = package self.parent = parent self.pure_python = pure_python self.locals = self.globals = {} """A map of the name of a local variable to the node defining the local. :type: dict(str, NodeNG) """ self.body = [] """The contents of the module. :type: list(NodeNG) or None """ self.future_imports = set() # pylint: enable=redefined-builtin def postinit(self, body=None): """Do some setup after initialisation. :param body: The contents of the module. :type body: list(NodeNG) or None """ self.body = body def _get_stream(self): if self.file_bytes is not None: return io.BytesIO(self.file_bytes) if self.file is not None: stream = open(self.file, 'rb') return stream return None def stream(self): """Get a stream to the underlying file or bytes. :type: file or io.BytesIO or None """ return self._get_stream() def block_range(self, lineno): """Get a range from where this node starts to where this node ends. :param lineno: Unused. :type lineno: int :returns: The range of line numbers that this node belongs to. :rtype: tuple(int, int) """ return self.fromlineno, self.tolineno def scope_lookup(self, node, name, offset=0): """Lookup where the given variable is assigned. :param node: The node to look for assignments up to. Any assignments after the given node are ignored. :type node: NodeNG :param name: The name of the variable to find assignments for. :type name: str :param offset: The line offset to filter statements up to. :type offset: int :returns: This scope node and the list of assignments associated to the given name according to the scope where it has been found (locals, globals or builtin). :rtype: tuple(str, list(NodeNG)) """ if name in self.scope_attrs and name not in self.locals: try: return self, self.getattr(name) except exceptions.AttributeInferenceError: return self, () return self._scope_lookup(node, name, offset) def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type. :rtype: str """ return '%s.module' % BUILTINS def display_type(self): """A human readable type of this node. :returns: The type of this node. 
:rtype: str """ return 'Module' def getattr(self, name, context=None, ignore_locals=False): result = [] name_in_locals = name in self.locals if name in self.special_attributes and not ignore_locals and not name_in_locals: result = [self.special_attributes.lookup(name)] elif not ignore_locals and name_in_locals: result = self.locals[name] elif self.package: try: result = [self.import_module(name, relative_only=True)] except (exceptions.AstroidBuildingError, SyntaxError) as exc: raise exceptions.AttributeInferenceError( target=self, attribute=name, context=context, ) from exc result = [n for n in result if not isinstance(n, node_classes.DelName)] if result: return result raise exceptions.AttributeInferenceError(target=self, attribute=name, context=context) def igetattr(self, name, context=None): """Infer the possible values of the given variable. :param name: The name of the variable to infer. :type name: str :returns: The inferred possible values. :rtype: iterable(NodeNG) or None """ # set lookup name since this is necessary to infer on import nodes for # instance context = contextmod.copy_context(context) context.lookupname = name try: return bases._infer_stmts(self.getattr(name, context), context, frame=self) except exceptions.AttributeInferenceError as error: raise exceptions.InferenceError( error.message, target=self, attribute=name, context=context, ) from error def fully_defined(self): """Check if this module has been build from a .py file. If so, the module contains a complete representation, including the code. :returns: True if the module has been built from a .py file. :rtype: bool """ return self.file is not None and self.file.endswith('.py') def statement(self): """The first parent node, including self, marked as statement node. :returns: The first parent statement. :rtype: NodeNG """ return self def previous_sibling(self): """The previous sibling statement. :returns: The previous sibling statement node. :rtype: NodeNG or None """ def next_sibling(self): """The next sibling statement node. :returns: The next sibling statement node. :rtype: NodeNG or None """ _absolute_import_activated = True def absolute_import_activated(self): """Whether :pep:`328` absolute import behaviour has been enabled. :returns: True if :pep:`328` has been enabled, False otherwise. :rtype: bool """ return self._absolute_import_activated def import_module(self, modname, relative_only=False, level=None): """Get the ast for a given module as if imported from this module. :param modname: The name of the module to "import". :type modname: str :param relative_only: Whether to only consider relative imports. :type relative_only: bool :param level: The level of relative import. :type level: int or None :returns: The imported module ast. :rtype: NodeNG """ if relative_only and level is None: level = 0 absmodname = self.relative_to_absolute_name(modname, level) try: return MANAGER.ast_from_module_name(absmodname) except exceptions.AstroidBuildingError: # we only want to import a sub module or package of this module, # skip here if relative_only: raise return MANAGER.ast_from_module_name(modname) def relative_to_absolute_name(self, modname, level): """Get the absolute module name for a relative import. The relative import can be implicit or explicit. :param modname: The module name to convert. :type modname: str :param level: The level of relative import. :type level: int :returns: The absolute module name. :rtype: str :raises TooManyLevelsError: When the relative import refers to a module too far above this one. 
""" # XXX this returns non sens when called on an absolute import # like 'pylint.checkers.astroid.utils' # XXX doesn't return absolute name if self.name isn't absolute name if self.absolute_import_activated() and level is None: return modname if level: if self.package: level = level - 1 if level and self.name.count('.') < level: raise exceptions.TooManyLevelsError( level=level, name=self.name) package_name = self.name.rsplit('.', level)[0] elif self.package: package_name = self.name else: package_name = self.name.rsplit('.', 1)[0] if package_name: if not modname: return package_name return '%s.%s' % (package_name, modname) return modname def wildcard_import_names(self): """The list of imported names when this module is 'wildcard imported'. It doesn't include the '__builtins__' name which is added by the current CPython implementation of wildcard imports. :returns: The list of imported names. :rtype: list(str) """ # We separate the different steps of lookup in try/excepts # to avoid catching too many Exceptions default = [name for name in self.keys() if not name.startswith('_')] try: all_values = self['__all__'] except KeyError: return default try: explicit = next(all_values.assigned_stmts()) except exceptions.InferenceError: return default except AttributeError: # not an assignment node # XXX infer? return default # Try our best to detect the exported name. inferred = [] try: explicit = next(explicit.infer()) except exceptions.InferenceError: return default if not isinstance(explicit, (node_classes.Tuple, node_classes.List)): return default str_const = lambda node: (isinstance(node, node_classes.Const) and isinstance(node.value, str)) for node in explicit.elts: if str_const(node): inferred.append(node.value) else: try: inferred_node = next(node.infer()) except exceptions.InferenceError: continue if str_const(inferred_node): inferred.append(inferred_node.value) return inferred def public_names(self): """The list of the names that are publicly available in this module. :returns: The list of publc names. :rtype: list(str) """ return [name for name in self.keys() if not name.startswith('_')] def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. For a :class:`Module` this is always ``True``. :rtype: bool """ return True def get_children(self): yield from self.body class ComprehensionScope(LocalsDictNodeNG): """Scoping for different types of comprehensions.""" def frame(self): """The first parent frame node. A frame node is a :class:`Module`, :class:`FunctionDef`, or :class:`ClassDef`. :returns: The first parent frame node. :rtype: Module or FunctionDef or ClassDef """ return self.parent.frame() scope_lookup = LocalsDictNodeNG._scope_lookup class GeneratorExp(ComprehensionScope): """Class representing an :class:`ast.GeneratorExp` node. >>> node = astroid.extract_node('(thing for thing in things if thing)') >>> node """ _astroid_fields = ('elt', 'generators') _other_other_fields = ('locals',) elt = None """The element that forms the output of the expression. :type: NodeNG or None """ generators = None """The generators that are looped through. :type: list(Comprehension) or None """ def __init__(self, lineno=None, col_offset=None, parent=None): """ :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. 
:type parent: NodeNG or None """ self.locals = {} """A map of the name of a local variable to the node defining the local. :type: dict(str, NodeNG) """ super(GeneratorExp, self).__init__(lineno, col_offset, parent) def postinit(self, elt=None, generators=None): """Do some setup after initialisation. :param elt: The element that forms the output of the expression. :type elt: NodeNG or None :param generators: The generators that are looped through. :type generators: list(Comprehension) or None """ self.elt = elt if generators is None: self.generators = [] else: self.generators = generators def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. For a :class:`GeneratorExp` this is always ``True``. :rtype: bool """ return True def get_children(self): yield self.elt yield from self.generators class DictComp(ComprehensionScope): """Class representing an :class:`ast.DictComp` node. >>> node = astroid.extract_node('{k:v for k, v in things if k > v}') >>> node """ _astroid_fields = ('key', 'value', 'generators') _other_other_fields = ('locals',) key = None """What produces the keys. :type: NodeNG or None """ value = None """What produces the values. :type: NodeNG or None """ generators = None """The generators that are looped through. :type: list(Comprehension) or None """ def __init__(self, lineno=None, col_offset=None, parent=None): """ :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.locals = {} """A map of the name of a local variable to the node defining the local. :type: dict(str, NodeNG) """ super(DictComp, self).__init__(lineno, col_offset, parent) def postinit(self, key=None, value=None, generators=None): """Do some setup after initialisation. :param key: What produces the keys. :type key: NodeNG or None :param value: What produces the values. :type value: NodeNG or None :param generators: The generators that are looped through. :type generators: list(Comprehension) or None """ self.key = key self.value = value if generators is None: self.generators = [] else: self.generators = generators def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. For a :class:`DictComp` this is always :class:`Uninferable`. :rtype: Uninferable """ return util.Uninferable def get_children(self): yield self.key yield self.value yield from self.generators class SetComp(ComprehensionScope): """Class representing an :class:`ast.SetComp` node. >>> node = astroid.extract_node('{thing for thing in things if thing}') >>> node """ _astroid_fields = ('elt', 'generators') _other_other_fields = ('locals',) elt = None """The element that forms the output of the expression. :type: NodeNG or None """ generators = None """The generators that are looped through. :type: list(Comprehension) or None """ def __init__(self, lineno=None, col_offset=None, parent=None): """ :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.locals = {} """A map of the name of a local variable to the node defining the local. 
:type: dict(str, NodeNG) """ super(SetComp, self).__init__(lineno, col_offset, parent) def postinit(self, elt=None, generators=None): """Do some setup after initialisation. :param elt: The element that forms the output of the expression. :type elt: NodeNG or None :param generators: The generators that are looped through. :type generators: list(Comprehension) or None """ self.elt = elt if generators is None: self.generators = [] else: self.generators = generators def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. For a :class:`SetComp` this is always :class:`Uninferable`. :rtype: Uninferable """ return util.Uninferable def get_children(self): yield self.elt yield from self.generators class _ListComp(node_classes.NodeNG): """Class representing an :class:`ast.ListComp` node. >>> node = astroid.extract_node('[thing for thing in things if thing]') >>> node """ _astroid_fields = ('elt', 'generators') elt = None """The element that forms the output of the expression. :type: NodeNG or None """ generators = None """The generators that are looped through. :type: list(Comprehension) or None """ def postinit(self, elt=None, generators=None): """Do some setup after initialisation. :param elt: The element that forms the output of the expression. :type elt: NodeNG or None :param generators: The generators that are looped through. :type generators: list(Comprehension) or None """ self.elt = elt self.generators = generators def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. For a :class:`ListComp` this is always :class:`Uninferable`. :rtype: Uninferable """ return util.Uninferable def get_children(self): yield self.elt yield from self.generators class ListComp(_ListComp, ComprehensionScope): """Class representing an :class:`ast.ListComp` node. >>> node = astroid.extract_node('[thing for thing in things if thing]') >>> node """ _other_other_fields = ('locals',) def __init__(self, lineno=None, col_offset=None, parent=None): self.locals = {} """A map of the name of a local variable to the node defining it. :type: dict(str, NodeNG) """ super(ListComp, self).__init__(lineno, col_offset, parent) def _infer_decorator_callchain(node): """Detect decorator call chaining and see if the end result is a static or a classmethod. """ if not isinstance(node, FunctionDef): return None if not node.parent: return None try: result = next(node.infer_call_result(node.parent)) except (StopIteration, exceptions.InferenceError): return None if isinstance(result, bases.Instance): result = result._proxied if isinstance(result, ClassDef): if result.is_subtype_of('%s.classmethod' % BUILTINS): return 'classmethod' if result.is_subtype_of('%s.staticmethod' % BUILTINS): return 'staticmethod' return None class Lambda(mixins.FilterStmtsMixin, LocalsDictNodeNG): """Class representing an :class:`ast.Lambda` node. >>> node = astroid.extract_node('lambda arg: arg + 1') >>> node l.1 at 0x7f23b2e41518> """ _astroid_fields = ('args', 'body',) _other_other_fields = ('locals',) name = '' is_lambda = True def implicit_parameters(self): return 0 # function's type, 'function' | 'method' | 'staticmethod' | 'classmethod' @property def type(self): """Whether this is a method or function. :returns: 'method' if this is a method, 'function' otherwise. 
:rtype: str """ # pylint: disable=no-member if self.args.args and self.args.args[0].name == 'self': if isinstance(self.parent.scope(), ClassDef): return 'method' return 'function' def __init__(self, lineno=None, col_offset=None, parent=None): """ :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.locals = {} """A map of the name of a local variable to the node defining it. :type: dict(str, NodeNG) """ self.args = [] """The arguments that the function takes. :type: Arguments or list """ self.body = [] """The contents of the function body. :type: list(NodeNG) """ super(Lambda, self).__init__(lineno, col_offset, parent) def postinit(self, args, body): """Do some setup after initialisation. :param args: The arguments that the function takes. :type args: Arguments :param body: The contents of the function body. :type body: list(NodeNG) """ self.args = args self.body = body def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type. :rtype: str """ if 'method' in self.type: return '%s.instancemethod' % BUILTINS return '%s.function' % BUILTINS def display_type(self): """A human readable type of this node. :returns: The type of this node. :rtype: str """ if 'method' in self.type: return 'Method' return 'Function' def callable(self): """Whether this node defines something that is callable. :returns: True if this defines something that is callable, False otherwise. For a :class:`Lambda` this is always ``True``. :rtype: bool """ return True def argnames(self): """Get the names of each of the arguments. :returns: The names of the arguments. :rtype: list(str) """ # pylint: disable=no-member; github.com/pycqa/astroid/issues/291 # args is in fact redefined later on by postinit. Can't be changed # to None due to a strong interaction between Lambda and FunctionDef. if self.args.args: # maybe None with builtin functions names = _rec_get_names(self.args.args) else: names = [] if self.args.vararg: names.append(self.args.vararg) if self.args.kwarg: names.append(self.args.kwarg) return names def infer_call_result(self, caller, context=None): """Infer what the function returns when called. :param caller: Unused :type caller: object """ # pylint: disable=no-member; github.com/pycqa/astroid/issues/291 # args is in fact redefined later on by postinit. Can't be changed # to None due to a strong interaction between Lambda and FunctionDef. return self.body.infer(context) def scope_lookup(self, node, name, offset=0): """Lookup where the given names is assigned. :param node: The node to look for assignments up to. Any assignments after the given node are ignored. :type node: NodeNG :param name: The name to find assignments for. :type name: str :param offset: The line offset to filter statements up to. :type offset: int :returns: This scope node and the list of assignments associated to the given name according to the scope where it has been found (locals, globals or builtin). :rtype: tuple(str, list(NodeNG)) """ # pylint: disable=no-member; github.com/pycqa/astroid/issues/291 # args is in fact redefined later on by postinit. Can't be changed # to None due to a strong interaction between Lambda and FunctionDef. 
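        # Names used in default values are evaluated in the enclosing
        # frame, so they must be looked up there rather than inside the
        # lambda/function being defined.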
if node in self.args.defaults or node in self.args.kw_defaults: frame = self.parent.frame() # line offset to avoid that def func(f=func) resolve the default # value to the defined function offset = -1 else: # check this is not used in function decorators frame = self return frame._scope_lookup(node, name, offset) def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. For a :class:`Lambda` this is always ``True``. :rtype: bool """ return True def get_children(self): yield self.args yield self.body class FunctionDef(mixins.MultiLineBlockMixin, node_classes.Statement, Lambda): """Class representing an :class:`ast.FunctionDef`. >>> node = astroid.extract_node(''' ... def my_func(arg): ... return arg + 1 ... ''') >>> node """ _astroid_fields = ('decorators', 'args', 'returns', 'body') _multi_line_block_fields = ('body',) returns = None decorators = None """The decorators that are applied to this method or function. :type: Decorators or None """ special_attributes = objectmodel.FunctionModel() """The names of special attributes that this function has. :type: objectmodel.FunctionModel """ is_function = True """Whether this node indicates a function. For a :class:`FunctionDef` this is always ``True``. :type: bool """ type_annotation = None """If present, this will contain the type annotation passed by a type comment :type: NodeNG or None """ type_comment_args = None """ If present, this will contain the type annotation for arguments passed by a type comment """ type_comment_returns = None """If present, this will contain the return type annotation, passed by a type comment""" # attributes below are set by the builder module or by raw factories _other_fields = ('name', 'doc') _other_other_fields = ( 'locals', '_type', 'type_comment_returns', 'type_comment_args', ) _type = None def __init__(self, name=None, doc=None, lineno=None, col_offset=None, parent=None): """ :param name: The name of the function. :type name: str or None :param doc: The function's docstring. :type doc: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.name = name """The name of the function. :type name: str or None """ self.doc = doc """The function's docstring. :type doc: str or None """ self.instance_attrs = {} super(FunctionDef, self).__init__(lineno, col_offset, parent) if parent: frame = parent.frame() frame.set_local(name, self) # pylint: disable=arguments-differ; different than Lambdas def postinit(self, args, body, decorators=None, returns=None, type_comment_returns=None, type_comment_args=None): """Do some setup after initialisation. :param args: The arguments that the function takes. :type args: Arguments or list :param body: The contents of the function body. :type body: list(NodeNG) :param decorators: The decorators that are applied to this method or function. :type decorators: Decorators or None :params type_comment_returns: The return type annotation passed via a type comment. :params type_comment_args: The args type annotation passed via a type comment. 
""" self.args = args self.body = body self.decorators = decorators self.returns = returns self.type_comment_returns = type_comment_returns self.type_comment_args = type_comment_args if isinstance(self.parent.frame(), ClassDef): self.set_local('__class__', self.parent.frame()) @decorators_mod.cachedproperty def extra_decorators(self): """The extra decorators that this function can have. Additional decorators are considered when they are used as assignments, as in ``method = staticmethod(method)``. The property will return all the callables that are used for decoration. :type: list(NodeNG) """ frame = self.parent.frame() if not isinstance(frame, ClassDef): return [] decorators = [] for assign in frame._get_assign_nodes(): if (isinstance(assign.value, node_classes.Call) and isinstance(assign.value.func, node_classes.Name)): for assign_node in assign.targets: if not isinstance(assign_node, node_classes.AssignName): # Support only `name = callable(name)` continue if assign_node.name != self.name: # Interested only in the assignment nodes that # decorates the current method. continue try: meth = frame[self.name] except KeyError: continue else: # Must be a function and in the same frame as the # original method. if (isinstance(meth, FunctionDef) and assign_node.frame() == frame): decorators.append(assign.value) return decorators @decorators_mod.cachedproperty def type(self): """The function type for this node. Possible values are: method, function, staticmethod, classmethod. :type: str """ builtin_descriptors = {'classmethod', 'staticmethod'} for decorator in self.extra_decorators: if decorator.func.name in builtin_descriptors: return decorator.func.name frame = self.parent.frame() type_name = 'function' if isinstance(frame, ClassDef): if self.name == '__new__': return 'classmethod' if sys.version_info >= (3, 6) and self.name == '__init_subclass__': return 'classmethod' type_name = 'method' if not self.decorators: return type_name for node in self.decorators.nodes: if isinstance(node, node_classes.Name): if node.name in builtin_descriptors: return node.name if isinstance(node, node_classes.Call): # Handle the following case: # @some_decorator(arg1, arg2) # def func(...) # try: current = next(node.func.infer()) except exceptions.InferenceError: continue _type = _infer_decorator_callchain(current) if _type is not None: return _type try: for inferred in node.infer(): # Check to see if this returns a static or a class method. _type = _infer_decorator_callchain(inferred) if _type is not None: return _type if not isinstance(inferred, ClassDef): continue for ancestor in inferred.ancestors(): if not isinstance(ancestor, ClassDef): continue if ancestor.is_subtype_of('%s.classmethod' % BUILTINS): return 'classmethod' if ancestor.is_subtype_of('%s.staticmethod' % BUILTINS): return 'staticmethod' except exceptions.InferenceError: pass return type_name @decorators_mod.cachedproperty def fromlineno(self): """The first line that this node appears on in the source code. :type: int or None """ # lineno is the line number of the first decorator, we want the def # statement lineno lineno = self.lineno if self.decorators is not None: lineno += sum(node.tolineno - node.lineno + 1 for node in self.decorators.nodes) return lineno @decorators_mod.cachedproperty def blockstart_tolineno(self): """The line on which the beginning of this block ends. :type: int """ return self.args.tolineno def block_range(self, lineno): """Get a range from the given line number to where this node ends. :param lineno: Unused. 
:type lineno: int :returns: The range of line numbers that this node belongs to, :rtype: tuple(int, int) """ return self.fromlineno, self.tolineno def getattr(self, name, context=None): """this method doesn't look in the instance_attrs dictionary since it's done by an Instance proxy at inference time. """ if name in self.instance_attrs: return self.instance_attrs[name] if name in self.special_attributes: return [self.special_attributes.lookup(name)] raise exceptions.AttributeInferenceError(target=self, attribute=name) def igetattr(self, name, context=None): """Inferred getattr, which returns an iterator of inferred statements.""" try: return bases._infer_stmts(self.getattr(name, context), context, frame=self) except exceptions.AttributeInferenceError as error: raise exceptions.InferenceError( error.message, target=self, attribute=name, context=context, ) from error def is_method(self): """Check if this function node represents a method. :returns: True if this is a method, False otherwise. :rtype: bool """ # check we are defined in a ClassDef, because this is usually expected # (e.g. pylint...) when is_method() return True return self.type != 'function' and isinstance(self.parent.frame(), ClassDef) @decorators_mod.cached def decoratornames(self): """Get the qualified names of each of the decorators on this function. :returns: The names of the decorators. :rtype: set(str) """ result = set() decoratornodes = [] if self.decorators is not None: decoratornodes += self.decorators.nodes decoratornodes += self.extra_decorators for decnode in decoratornodes: try: for infnode in decnode.infer(): result.add(infnode.qname()) except exceptions.InferenceError: continue return result def is_bound(self): """Check if the function is bound to an instance or class. :returns: True if the function is bound to an instance or class, False otherwise. :rtype: bool """ return self.type == 'classmethod' def is_abstract(self, pass_is_abstract=True): """Check if the method is abstract. A method is considered abstract if any of the following is true: * The only statement is 'raise NotImplementedError' * The only statement is 'pass' and pass_is_abstract is True * The method is annotated with abc.astractproperty/abc.abstractmethod :returns: True if the method is abstract, False otherwise. :rtype: bool """ if self.decorators: for node in self.decorators.nodes: try: inferred = next(node.infer()) except exceptions.InferenceError: continue if inferred and inferred.qname() in ('abc.abstractproperty', 'abc.abstractmethod'): return True for child_node in self.body: if isinstance(child_node, node_classes.Raise): if child_node.raises_not_implemented(): return True return pass_is_abstract and isinstance(child_node, node_classes.Pass) # empty function is the same as function with a single "pass" statement if pass_is_abstract: return True def is_generator(self): """Check if this is a generator function. :returns: True is this is a generator function, False otherwise. :rtype: bool """ return next(self._get_yield_nodes_skip_lambdas(), False) def infer_call_result(self, caller=None, context=None): """Infer what the function returns when called. :returns: What the function returns. :rtype: iterable(NodeNG or Uninferable) or None """ if self.is_generator(): result = bases.Generator(self) yield result return # This is really a gigantic hack to work around metaclass generators # that return transient class-generating functions. 
Pylint's AST structure # cannot handle a base class object that is only used for calling __new__, # but does not contribute to the inheritance structure itself. We inject # a fake class into the hierarchy here for several well-known metaclass # generators, and filter it out later. if (self.name == 'with_metaclass' and len(self.args.args) == 1 and self.args.vararg is not None): metaclass = next(caller.args[0].infer(context)) if isinstance(metaclass, ClassDef): class_bases = [next(arg.infer(context)) for arg in caller.args[1:]] new_class = ClassDef(name='temporary_class') new_class.hide = True new_class.parent = self new_class.postinit( bases=[base for base in class_bases if base != util.Uninferable], body=[], decorators=[], metaclass=metaclass, ) yield new_class return returns = self._get_return_nodes_skip_functions() first_return = next(returns, None) if not first_return: raise exceptions.InferenceError('Empty return iterator') for returnnode in itertools.chain((first_return,), returns): if returnnode.value is None: yield node_classes.Const(None) else: try: yield from returnnode.value.infer(context) except exceptions.InferenceError: yield util.Uninferable def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. For a :class:`FunctionDef` this is always ``True``. :rtype: bool """ return True def get_children(self): if self.decorators is not None: yield self.decorators yield self.args if self.returns is not None: yield self.returns yield from self.body class AsyncFunctionDef(FunctionDef): """Class representing an :class:`ast.FunctionDef` node. A :class:`AsyncFunctionDef` is an asynchronous function created with the `async` keyword. >>> node = astroid.extract_node(''' async def func(things): async for thing in things: print(thing) ''') >>> node >>> node.body[0] """ def _rec_get_names(args, names=None): """return a list of all argument names""" if names is None: names = [] for arg in args: if isinstance(arg, node_classes.Tuple): _rec_get_names(arg.elts, names) else: names.append(arg.name) return names def _is_metaclass(klass, seen=None): """ Return if the given class can be used as a metaclass. 
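    A class qualifies if it is ``type`` itself or if one of its
    (transitive) bases is a metaclass.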
""" if klass.name == 'type': return True if seen is None: seen = set() for base in klass.bases: try: for baseobj in base.infer(): baseobj_name = baseobj.qname() if baseobj_name in seen: continue else: seen.add(baseobj_name) if isinstance(baseobj, bases.Instance): # not abstract return False if baseobj is util.Uninferable: continue if baseobj is klass: continue if not isinstance(baseobj, ClassDef): continue if baseobj._type == 'metaclass': return True if _is_metaclass(baseobj, seen): return True except exceptions.InferenceError: continue return False def _class_type(klass, ancestors=None): """return a ClassDef node type to differ metaclass and exception from 'regular' classes """ # XXX we have to store ancestors in case we have a ancestor loop if klass._type is not None: return klass._type if _is_metaclass(klass): klass._type = 'metaclass' elif klass.name.endswith('Exception'): klass._type = 'exception' else: if ancestors is None: ancestors = set() klass_name = klass.qname() if klass_name in ancestors: # XXX we are in loop ancestors, and have found no type klass._type = 'class' return 'class' ancestors.add(klass_name) for base in klass.ancestors(recurs=False): name = _class_type(base, ancestors) if name != 'class': if name == 'metaclass' and not _is_metaclass(klass): # don't propagate it if the current class # can't be a metaclass continue klass._type = base.type break if klass._type is None: klass._type = 'class' return klass._type def get_wrapping_class(node): """Get the class that wraps the given node. We consider that a class wraps a node if the class is a parent for the said node. :returns: The class that wraps the given node :rtype: ClassDef or None """ klass = node.frame() while klass is not None and not isinstance(klass, ClassDef): if klass.parent is None: klass = None else: klass = klass.parent.frame() return klass class ClassDef(mixins.FilterStmtsMixin, LocalsDictNodeNG, node_classes.Statement): """Class representing an :class:`ast.ClassDef` node. >>> node = astroid.extract_node(''' class Thing: def my_meth(self, arg): return arg + self.offset ''') >>> node """ # some of the attributes below are set by the builder module or # by a raw factories # a dictionary of class instances attributes _astroid_fields = ('decorators', 'bases', 'body') # name decorators = None """The decorators that are applied to this class. :type: Decorators or None """ special_attributes = objectmodel.ClassModel() """The names of special attributes that this class has. :type: objectmodel.ClassModel """ _type = None _metaclass_hack = False hide = False type = property(_class_type, doc=("The class type for this node.\n\n" "Possible values are: class, metaclass, exception.\n\n" ":type: str")) _other_fields = ('name', 'doc') _other_other_fields = ('locals', '_newstyle') _newstyle = None def __init__(self, name=None, doc=None, lineno=None, col_offset=None, parent=None): """ :param name: The name of the class. :type name: str or None :param doc: The function's docstring. :type doc: str or None :param lineno: The line that this node appears on in the source code. :type lineno: int or None :param col_offset: The column that this node appears on in the source code. :type col_offset: int or None :param parent: The parent node in the syntax tree. :type parent: NodeNG or None """ self.instance_attrs = {} self.locals = {} """A map of the name of a local variable to the node defining it. :type: dict(str, NodeNG) """ self.keywords = [] """The keywords given to the class definition. 
This is usually for :pep:`3115` style metaclass declaration. :type: list(Keyword) or None """ self.bases = [] """What the class inherits from. :type: list(NodeNG) """ self.body = [] """The contents of the class body. :type: list(NodeNG) """ self.name = name """The name of the class. :type name: str or None """ self.doc = doc """The class' docstring. :type doc: str or None """ super(ClassDef, self).__init__(lineno, col_offset, parent) if parent is not None: parent.frame().set_local(name, self) for local_name, node in self.implicit_locals(): self.add_local_node(node, local_name) def implicit_parameters(self): return 1 def implicit_locals(self): """Get implicitly defined class definition locals. :returns: the the name and Const pair for each local :rtype: tuple(tuple(str, node_classes.Const), ...) """ locals_ = (('__module__', self.special_attributes.py__module__),) if sys.version_info >= (3, 3): # __qualname__ is defined in PEP3155 locals_ += (("__qualname__", self.special_attributes.py__qualname__),) return locals_ # pylint: disable=redefined-outer-name def postinit(self, bases, body, decorators, newstyle=None, metaclass=None, keywords=None): """Do some setup after initialisation. :param bases: What the class inherits from. :type bases: list(NodeNG) :param body: The contents of the class body. :type body: list(NodeNG) :param decorators: The decorators that are applied to this class. :type decorators: Decorators or None :param newstyle: Whether this is a new style class or not. :type newstyle: bool or None :param metaclass: The metaclass of this class. :type metaclass: NodeNG or None :param keywords: The keywords given to the class definition. :type keywords: list(Keyword) or None """ self.keywords = keywords self.bases = bases self.body = body self.decorators = decorators if newstyle is not None: self._newstyle = newstyle if metaclass is not None: self._metaclass = metaclass def _newstyle_impl(self, context=None): if context is None: context = contextmod.InferenceContext() if self._newstyle is not None: return self._newstyle for base in self.ancestors(recurs=False, context=context): if base._newstyle_impl(context): self._newstyle = True break klass = self.declared_metaclass() # could be any callable, we'd need to infer the result of klass(name, # bases, dict). punt if it's not a class node. if klass is not None and isinstance(klass, ClassDef): self._newstyle = klass._newstyle_impl(context) if self._newstyle is None: self._newstyle = False return self._newstyle _newstyle = None newstyle = property(_newstyle_impl, doc=("Whether this is a new style class or not\n\n" ":type: bool or None")) @decorators_mod.cachedproperty def blockstart_tolineno(self): """The line on which the beginning of this block ends. :type: int """ if self.bases: return self.bases[-1].tolineno return self.fromlineno def block_range(self, lineno): """Get a range from the given line number to where this node ends. :param lineno: Unused. :type lineno: int :returns: The range of line numbers that this node belongs to, :rtype: tuple(int, int) """ return self.fromlineno, self.tolineno def pytype(self): """Get the name of the type that this node represents. :returns: The name of the type. :rtype: str """ if self.newstyle: return '%s.type' % BUILTINS return '%s.classobj' % BUILTINS def display_type(self): """A human readable type of this node. :returns: The type of this node. :rtype: str """ return 'Class' def callable(self): """Whether this node defines something that is callable. 
:returns: True if this defines something that is callable, False otherwise. For a :class:`ClassDef` this is always ``True``. :rtype: bool """ return True def is_subtype_of(self, type_name, context=None): """Whether this class is a subtype of the given type. :param type_name: The name of the type of check against. :type type_name: str :returns: True if this class is a subtype of the given type, False otherwise. :rtype: bool """ if self.qname() == type_name: return True for anc in self.ancestors(context=context): if anc.qname() == type_name: return True return False def _infer_type_call(self, caller, context): name_node = next(caller.args[0].infer(context)) if (isinstance(name_node, node_classes.Const) and isinstance(name_node.value, str)): name = name_node.value else: return util.Uninferable result = ClassDef(name, None) # Get the bases of the class. class_bases = next(caller.args[1].infer(context)) if isinstance(class_bases, (node_classes.Tuple, node_classes.List)): result.bases = class_bases.itered() else: # There is currently no AST node that can represent an 'unknown' # node (Uninferable is not an AST node), therefore we simply return Uninferable here # although we know at least the name of the class. return util.Uninferable # Get the members of the class try: members = next(caller.args[2].infer(context)) except exceptions.InferenceError: members = None if members and isinstance(members, node_classes.Dict): for attr, value in members.items: if (isinstance(attr, node_classes.Const) and isinstance(attr.value, str)): result.locals[attr.value] = [value] result.parent = caller.parent return result def infer_call_result(self, caller, context=None): """infer what a class is returning when called""" if (self.is_subtype_of('%s.type' % (BUILTINS,), context) and len(caller.args) == 3): result = self._infer_type_call(caller, context) yield result return dunder_call = None try: metaclass = self.metaclass(context=context) if metaclass is not None: dunder_call = next(metaclass.igetattr("__call__", context)) except exceptions.AttributeInferenceError: pass if (dunder_call is not None and dunder_call.qname() != "builtins.type.__call__"): context = contextmod.bind_context_to_node(context, self) yield from dunder_call.infer_call_result( caller, context) else: # Call type.__call__ if not set metaclass # (since type is the default metaclass) yield bases.Instance(self) def scope_lookup(self, node, name, offset=0): """Lookup where the given name is assigned. :param node: The node to look for assignments up to. Any assignments after the given node are ignored. :type node: NodeNG :param name: The name to find assignments for. :type name: str :param offset: The line offset to filter statements up to. :type offset: int :returns: This scope node and the list of assignments associated to the given name according to the scope where it has been found (locals, globals or builtin). :rtype: tuple(str, list(NodeNG)) """ # If the name looks like a builtin name, just try to look # into the upper scope of this class. We might have a # decorator that it's poorly named after a builtin object # inside this class. lookup_upper_frame = ( isinstance(node.parent, node_classes.Decorators) and name in MANAGER.astroid_cache[builtins.__name__] ) if any(node == base or base.parent_of(node) for base in self.bases) or lookup_upper_frame: # Handle the case where we have either a name # in the bases of a class, which exists before # the actual definition or the case where we have # a Getattr node, with that name. # # name = ... 
# class A(name): # def name(self): ... # # import name # class A(name.Name): # def name(self): ... frame = self.parent.frame() # line offset to avoid that class A(A) resolve the ancestor to # the defined class offset = -1 else: frame = self return frame._scope_lookup(node, name, offset) @property def basenames(self): """The names of the parent classes Names are given in the order they appear in the class definition. :type: list(str) """ return [bnode.as_string() for bnode in self.bases] def ancestors(self, recurs=True, context=None): """Iterate over the base classes in prefixed depth first order. :param recurs: Whether to recurse or return direct ancestors only. :type recurs: bool :returns: The base classes :rtype: iterable(NodeNG) """ # FIXME: should be possible to choose the resolution order # FIXME: inference make infinite loops possible here yielded = {self} if context is None: context = contextmod.InferenceContext() if not self.bases and self.qname() != 'builtins.object': yield builtin_lookup("object")[1][0] return for stmt in self.bases: with context.restore_path(): try: for baseobj in stmt.infer(context): if not isinstance(baseobj, ClassDef): if isinstance(baseobj, bases.Instance): baseobj = baseobj._proxied else: continue if not baseobj.hide: if baseobj in yielded: continue yielded.add(baseobj) yield baseobj if recurs: for grandpa in baseobj.ancestors(recurs=True, context=context): if grandpa is self: # This class is the ancestor of itself. break if grandpa in yielded: continue yielded.add(grandpa) yield grandpa except exceptions.InferenceError: continue def local_attr_ancestors(self, name, context=None): """Iterate over the parents that define the given name. :param name: The name to find definitions for. :type name: str :returns: The parents that define the given name. :rtype: iterable(NodeNG) """ if self.newstyle and all(n.newstyle for n in self.ancestors(context)): # Look up in the mro if we can. This will result in the # attribute being looked up just as Python does it. try: ancestors = self.mro(context)[1:] except exceptions.MroError: # Fallback to use ancestors, we can't determine # a sane MRO. ancestors = self.ancestors(context=context) else: ancestors = self.ancestors(context=context) for astroid in ancestors: if name in astroid: yield astroid def instance_attr_ancestors(self, name, context=None): """Iterate over the parents that define the given name as an attribute. :param name: The name to find definitions for. :type name: str :returns: The parents that define the given name as an instance attribute. :rtype: iterable(NodeNG) """ for astroid in self.ancestors(context=context): if name in astroid.instance_attrs: yield astroid def has_base(self, node): """Whether this class directly inherits from the given node. :param node: The node to check for. :type node: NodeNG :returns: True if this class directly inherits from the given node. :rtype: bool """ return node in self.bases def local_attr(self, name, context=None): """Get the list of assign nodes associated to the given name. Assignments are looked for in both this class and in parents. :returns: The list of assignments to the given name. :rtype: list(NodeNG) :raises AttributeInferenceError: If no attribute with this name can be found in this class or parent classes. 
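        For example, on a class whose body contains ``x = 1``,
        ``local_attr('x')`` returns the list holding that assignment's
        :class:`AssignName` node.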
""" result = [] if name in self.locals: result = self.locals[name] else: class_node = next(self.local_attr_ancestors(name, context), ()) if class_node: result = class_node.locals[name] result = [n for n in result if not isinstance(n, node_classes.DelAttr)] if result: return result raise exceptions.AttributeInferenceError(target=self, attribute=name, context=context) def instance_attr(self, name, context=None): """Get the list of nodes associated to the given attribute name. Assignments are looked for in both this class and in parents. :returns: The list of assignments to the given name. :rtype: list(NodeNG) :raises AttributeInferenceError: If no attribute with this name can be found in this class or parent classes. """ # Return a copy, so we don't modify self.instance_attrs, # which could lead to infinite loop. values = list(self.instance_attrs.get(name, [])) # get all values from parents for class_node in self.instance_attr_ancestors(name, context): values += class_node.instance_attrs[name] values = [n for n in values if not isinstance(n, node_classes.DelAttr)] if values: return values raise exceptions.AttributeInferenceError(target=self, attribute=name, context=context) def instantiate_class(self): """Get an :class:`Instance` of the :class:`ClassDef` node. :returns: An :class:`Instance` of the :class:`ClassDef` node, or self if this is not possible. :rtype: Instance or ClassDef """ return bases.Instance(self) def getattr(self, name, context=None, class_context=True): """Get an attribute from this class, using Python's attribute semantic. This method doesn't look in the :attr:`instance_attrs` dictionary since it is done by an :class:`Instance` proxy at inference time. It may return an :class:`Uninferable` object if the attribute has not been found, but a ``__getattr__`` or ``__getattribute__`` method is defined. If ``class_context`` is given, then it is considered that the attribute is accessed from a class context, e.g. ClassDef.attribute, otherwise it might have been accessed from an instance as well. If ``class_context`` is used in that case, then a lookup in the implicit metaclass and the explicit metaclass will be done. :param name: The attribute to look for. :type name: str :param class_context: Whether the attribute can be accessed statically. :type class_context: bool :returns: The attribute. :rtype: list(NodeNG) :raises AttributeInferenceError: If the attribute cannot be inferred. """ values = self.locals.get(name, []) if name in self.special_attributes and class_context and not values: result = [self.special_attributes.lookup(name)] if name == '__bases__': # Need special treatment, since they are mutable # and we need to return all the values. result += values return result # don't modify the list in self.locals! 
values = list(values) for classnode in self.ancestors(recurs=True, context=context): values += classnode.locals.get(name, []) if class_context: values += self._metaclass_lookup_attribute(name, context) if not values: raise exceptions.AttributeInferenceError(target=self, attribute=name, context=context) return values def _metaclass_lookup_attribute(self, name, context): """Search the given name in the implicit and the explicit metaclass.""" attrs = set() implicit_meta = self.implicit_metaclass() metaclass = self.metaclass() for cls in {implicit_meta, metaclass}: if cls and cls != self and isinstance(cls, ClassDef): cls_attributes = self._get_attribute_from_metaclass( cls, name, context) attrs.update(set(cls_attributes)) return attrs def _get_attribute_from_metaclass(self, cls, name, context): try: attrs = cls.getattr(name, context=context, class_context=True) except exceptions.AttributeInferenceError: return for attr in bases._infer_stmts(attrs, context, frame=cls): if not isinstance(attr, FunctionDef): yield attr continue if bases._is_property(attr): yield from attr.infer_call_result(self, context) continue if attr.type == 'classmethod': # If the method is a classmethod, then it will # be bound to the metaclass, not to the class # from where the attribute is retrieved. # get_wrapping_class could return None, so just # default to the current class. frame = get_wrapping_class(attr) or self yield bases.BoundMethod(attr, frame) elif attr.type == 'staticmethod': yield attr else: yield bases.BoundMethod(attr, self) def igetattr(self, name, context=None, class_context=True): """Infer the possible values of the given variable. :param name: The name of the variable to infer. :type name: str :returns: The inferred possible values. :rtype: iterable(NodeNG or Uninferable) """ # set lookup name since this is necessary to infer on import nodes for # instance context = contextmod.copy_context(context) context.lookupname = name try: attr = self.getattr(name, context, class_context=class_context)[0] for inferred in bases._infer_stmts([attr], context, frame=self): # yield Uninferable object instead of descriptors when necessary if (not isinstance(inferred, node_classes.Const) and isinstance(inferred, bases.Instance)): try: inferred._proxied.getattr('__get__', context) except exceptions.AttributeInferenceError: yield inferred else: yield util.Uninferable else: yield function_to_method(inferred, self) except exceptions.AttributeInferenceError as error: if not name.startswith('__') and self.has_dynamic_getattr(context): # class handle some dynamic attributes, return a Uninferable object yield util.Uninferable else: raise exceptions.InferenceError( error.message, target=self, attribute=name, context=context, ) def has_dynamic_getattr(self, context=None): """Check if the class has a custom __getattr__ or __getattribute__. If any such method is found and it is not from builtins, nor from an extension module, then the function will return True. :returns: True if the class has a custom __getattr__ or __getattribute__, False otherwise. 
:rtype: bool """ def _valid_getattr(node): root = node.root() return root.name != BUILTINS and getattr(root, 'pure_python', None) try: return _valid_getattr(self.getattr('__getattr__', context)[0]) except exceptions.AttributeInferenceError: #if self.newstyle: XXX cause an infinite recursion error try: getattribute = self.getattr('__getattribute__', context)[0] return _valid_getattr(getattribute) except exceptions.AttributeInferenceError: pass return False def getitem(self, index, context=None): """Return the inference of a subscript. This is basically looking up the method in the metaclass and calling it. :returns: The inferred value of a subscript to this class. :rtype: NodeNG :raises AstroidTypeError: If this class does not define a ``__getitem__`` method. """ try: methods = dunder_lookup.lookup(self, '__getitem__') except exceptions.AttributeInferenceError as exc: raise exceptions.AstroidTypeError(node=self, context=context) from exc method = methods[0] # Create a new callcontext for providing index as an argument. new_context = contextmod.bind_context_to_node(context, self) new_context.callcontext = contextmod.CallContext(args=[index]) return next(method.infer_call_result(self, new_context)) def methods(self): """Iterate over all of the method defined in this class and its parents. :returns: The methods defined on the class. :rtype: iterable(FunctionDef) """ done = {} for astroid in itertools.chain(iter((self,)), self.ancestors()): for meth in astroid.mymethods(): if meth.name in done: continue done[meth.name] = None yield meth def mymethods(self): """Iterate over all of the method defined in this class only. :returns: The methods defined on the class. :rtype: iterable(FunctionDef) """ for member in self.values(): if isinstance(member, FunctionDef): yield member def implicit_metaclass(self): """Get the implicit metaclass of the current class. For newstyle classes, this will return an instance of builtins.type. For oldstyle classes, it will simply return None, since there's no implicit metaclass there. :returns: The metaclass. :rtype: builtins.type or None """ if self.newstyle: return builtin_lookup('type')[1][0] return None _metaclass = None def declared_metaclass(self, context=None): """Return the explicit declared metaclass for the current class. An explicit declared metaclass is defined either by passing the ``metaclass`` keyword argument in the class definition line (Python 3) or (Python 2) by having a ``__metaclass__`` class attribute, or if there are no explicit bases but there is a global ``__metaclass__`` variable. :returns: The metaclass of this class, or None if one could not be found. :rtype: NodeNG or None """ for base in self.bases: try: for baseobj in base.infer(context=context): if isinstance(baseobj, ClassDef) and baseobj.hide: self._metaclass = baseobj._metaclass self._metaclass_hack = True break except exceptions.InferenceError: pass if self._metaclass: # Expects this from Py3k TreeRebuilder try: return next(node for node in self._metaclass.infer(context=context) if node is not util.Uninferable) except (exceptions.InferenceError, StopIteration): return None return None def _find_metaclass(self, seen=None, context=None): if seen is None: seen = set() seen.add(self) klass = self.declared_metaclass(context=context) if klass is None: for parent in self.ancestors(context=context): if parent not in seen: klass = parent._find_metaclass(seen) if klass is not None: break return klass def metaclass(self, context=None): """Get the metaclass of this class. 
If this class does not define explicitly a metaclass, then the first defined metaclass in ancestors will be used instead. :returns: The metaclass of this class. :rtype: NodeNG or None """ return self._find_metaclass(context=context) def has_metaclass_hack(self): return self._metaclass_hack def _islots(self): """ Return an iterator with the inferred slots. """ if '__slots__' not in self.locals: return None for slots in self.igetattr('__slots__'): # check if __slots__ is a valid type for meth in ITER_METHODS: try: slots.getattr(meth) break except exceptions.AttributeInferenceError: continue else: continue if isinstance(slots, node_classes.Const): # a string. Ignore the following checks, # but yield the node, only if it has a value if slots.value: yield slots continue if not hasattr(slots, 'itered'): # we can't obtain the values, maybe a .deque? continue if isinstance(slots, node_classes.Dict): values = [item[0] for item in slots.items] else: values = slots.itered() if values is util.Uninferable: continue if not values: # Stop the iteration, because the class # has an empty list of slots. return values for elt in values: try: for inferred in elt.infer(): if inferred is util.Uninferable: continue if (not isinstance(inferred, node_classes.Const) or not isinstance(inferred.value, str)): continue if not inferred.value: continue yield inferred except exceptions.InferenceError: continue return None def _slots(self): if not self.newstyle: raise NotImplementedError( "The concept of slots is undefined for old-style classes.") slots = self._islots() try: first = next(slots) except StopIteration as exc: # The class doesn't have a __slots__ definition or empty slots. if exc.args and exc.args[0] not in ('', None): return exc.args[0] return None return [first] + list(slots) # Cached, because inferring them all the time is expensive @decorators_mod.cached def slots(self): """Get all the slots for this node. :returns: The names of slots for this class. If the class doesn't define any slot, through the ``__slots__`` variable, then this function will return a None. Also, it will return None in the case the slots were not inferred. :rtype: list(str) or None """ def grouped_slots(): # Not interested in object, since it can't have slots. for cls in self.mro()[:-1]: try: cls_slots = cls._slots() except NotImplementedError: continue if cls_slots is not None: yield from cls_slots else: yield None if not self.newstyle: raise NotImplementedError( "The concept of slots is undefined for old-style classes.") slots = list(grouped_slots()) if not all(slot is not None for slot in slots): return None return sorted(slots, key=lambda item: item.value) def _inferred_bases(self, context=None): # Similar with .ancestors, but the difference is when one base is inferred, # only the first object is wanted. That's because # we aren't interested in superclasses, as in the following # example: # # class SomeSuperClass(object): pass # class SomeClass(SomeSuperClass): pass # class Test(SomeClass): pass # # Inferring SomeClass from the Test's bases will give # us both SomeClass and SomeSuperClass, but we are interested # only in SomeClass. 
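        # Hidden, synthetic bases (e.g. the temporary class injected for
        # ``with_metaclass``) are skipped below in favour of their own bases.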
if context is None: context = contextmod.InferenceContext() if not self.bases and self.qname() != 'builtins.object': yield builtin_lookup("object")[1][0] return for stmt in self.bases: try: baseobj = next(stmt.infer(context=context)) except exceptions.InferenceError: continue if isinstance(baseobj, bases.Instance): baseobj = baseobj._proxied if not isinstance(baseobj, ClassDef): continue if not baseobj.hide: yield baseobj else: yield from baseobj.bases def _compute_mro(self, context=None): inferred_bases = list(self._inferred_bases(context=context)) bases_mro = [] for base in inferred_bases: if base is self: continue try: mro = base._compute_mro(context=context) bases_mro.append(mro) except NotImplementedError: # Some classes have in their ancestors both newstyle and # old style classes. For these we can't retrieve the .mro, # although in Python it's possible, since the class we are # currently working is in fact new style. # So, we fallback to ancestors here. ancestors = list(base.ancestors(context=context)) bases_mro.append(ancestors) unmerged_mro = ([[self]] + bases_mro + [inferred_bases]) _verify_duplicates_mro(unmerged_mro, self, context) return _c3_merge(unmerged_mro, self, context) def mro(self, context=None): """Get the method resolution order, using C3 linearization. :returns: The list of ancestors, sorted by the mro. :rtype: list(NodeNG) :raises NotImplementedError: If this is an old style class, since they don't have the concept of an MRO. :raises DuplicateBasesError: Duplicate bases in the same class base :raises InconsistentMroError: A class' MRO is inconsistent """ if not self.newstyle: raise NotImplementedError( "Could not obtain mro for old-style classes.") return self._compute_mro(context=context) def bool_value(self): """Determine the boolean value of this node. :returns: The boolean value of this node. For a :class:`ClassDef` this is always ``True``. :rtype: bool """ return True def get_children(self): if self.decorators is not None: yield self.decorators yield from self.bases yield from self.body def _get_assign_nodes(self): for child_node in self.body: yield from child_node._get_assign_nodes() astroid-2.0.1/astroid/test_utils.py0000644000076500000240000000431413324063433020151 0ustar claudiustaff00000000000000# Copyright (c) 2013-2014 Google, Inc. # Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2015-2016, 2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2018 Anthony Sottile # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Utility functions for test code that uses astroid ASTs as input.""" import contextlib import functools import sys import warnings from astroid import nodes def require_version(minver=None, maxver=None): """ Compare version of python interpreter to the given one. Skip the test if older. """ def parse(string, default=None): string = string or default try: return tuple(int(v) for v in string.split('.')) except ValueError as exc: raise ValueError( '{string} is not a correct version : should be X.Y[.Z].'.format(string=string) ) from exc def check_require_version(f): current = sys.version_info[:3] if parse(minver, "0") < current <= parse(maxver, "4"): return f str_version = '.'.join(str(v) for v in sys.version_info) @functools.wraps(f) def new_f(self, *args, **kwargs): if minver is not None: self.skipTest('Needs Python > %s. Current version is %s.' 
% (minver, str_version)) elif maxver is not None: self.skipTest('Needs Python <= %s. Current version is %s.' % (maxver, str_version)) return new_f return check_require_version def get_name_node(start_from, name, index=0): return [n for n in start_from.nodes_of_class(nodes.Name) if n.name == name][index] @contextlib.contextmanager def enable_warning(warning): warnings.simplefilter('always', warning) try: yield finally: # Reset it to default value, so it will take # into account the values from the -W flag. warnings.simplefilter('default', warning) astroid-2.0.1/astroid/tests/0000755000076500000240000000000013324065077016546 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/__init__.py0000644000076500000240000000000013324063433020637 0ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/resources.py0000644000076500000240000000377413324063433021137 0ustar claudiustaff00000000000000# Copyright (c) 2014 Google, Inc. # Copyright (c) 2015-2016 Claudiu Popa # Copyright (c) 2016 Ceridwen # Copyright (c) 2018 Nick Drozd # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import os import sys import pkg_resources from astroid import builder from astroid import MANAGER from astroid.bases import BUILTINS from astroid import tests DATA_DIR = os.path.join('testdata', 'python{}'.format(sys.version_info[0])) RESOURCE_PATH = os.path.join(tests.__path__[0], DATA_DIR, 'data') def find(name): return pkg_resources.resource_filename( 'astroid.tests', os.path.normpath(os.path.join(DATA_DIR, name))) def build_file(path, modname=None): return builder.AstroidBuilder().file_build(find(path), modname) class SysPathSetup: def setUp(self): sys.path.insert(0, find('')) def tearDown(self): del sys.path[0] datadir = find('') for key in list(sys.path_importer_cache): if key.startswith(datadir): del sys.path_importer_cache[key] class AstroidCacheSetupMixin: """Mixin for handling the astroid cache problems. When clearing the astroid cache, some tests fails due to cache inconsistencies, where some objects had a different builtins object referenced. This saves the builtins module and makes sure to add it back to the astroid_cache after the tests finishes. The builtins module is special, since some of the transforms for a couple of its objects (str, bytes etc) are executed only once, so astroid_bootstrapping will be useless for retrieving the original builtins module. 
""" @classmethod def setUpClass(cls): cls._builtins = MANAGER.astroid_cache.get(BUILTINS) @classmethod def tearDownClass(cls): if cls._builtins: MANAGER.astroid_cache[BUILTINS] = cls._builtins astroid-2.0.1/astroid/tests/testdata/0000755000076500000240000000000013324065077020357 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/0000755000076500000240000000000013324065077021762 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/0000755000076500000240000000000013324065077022673 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/__init__.py0000644000076500000240000000010413324063433024771 0ustar claudiustaff00000000000000__revision__="$Id: __init__.py,v 1.1 2005-06-13 20:55:20 syt Exp $" astroid-2.0.1/astroid/tests/testdata/python2/data/absimp/0000755000076500000240000000000013324065077024146 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/absimp/__init__.py0000644000076500000240000000013113324063433026244 0ustar claudiustaff00000000000000"""a package with absolute import activated """ from __future__ import absolute_import astroid-2.0.1/astroid/tests/testdata/python2/data/absimp/sidepackage/0000755000076500000240000000000013324065077026406 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py0000644000076500000240000000005213324063433030506 0ustar claudiustaff00000000000000"""a side package with nothing in it """ astroid-2.0.1/astroid/tests/testdata/python2/data/absimp/string.py0000644000076500000240000000012313324063433026014 0ustar claudiustaff00000000000000from __future__ import absolute_import, print_function import string print(string) astroid-2.0.1/astroid/tests/testdata/python2/data/absimport.py0000644000076500000240000000011613324063433025235 0ustar claudiustaff00000000000000from __future__ import absolute_import import email from email import message astroid-2.0.1/astroid/tests/testdata/python2/data/all.py0000644000076500000240000000015213324063433024005 0ustar claudiustaff00000000000000 name = 'a' _bla = 2 other = 'o' class Aaa: pass def func(): print 'yo' __all__ = 'Aaa', '_bla', 'name' astroid-2.0.1/astroid/tests/testdata/python2/data/appl/0000755000076500000240000000000013324065077023627 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/appl/__init__.py0000644000076500000240000000001513324063433025726 0ustar claudiustaff00000000000000""" Init """ astroid-2.0.1/astroid/tests/testdata/python2/data/appl/myConnection.py0000644000076500000240000000045713324063433026646 0ustar claudiustaff00000000000000from __future__ import print_function from data import SSL1 class MyConnection(SSL1.Connection): """An SSL connection.""" def __init__(self, dummy): print('MyConnection init') if __name__ == '__main__': myConnection = MyConnection(' ') raw_input('Press Enter to continue...') astroid-2.0.1/astroid/tests/testdata/python2/data/contribute_to_namespace/0000755000076500000240000000000013324065077027567 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/0000755000076500000240000000000013324065077032754 5ustar claudiustaff00000000000000././@LongLink0000000000000000000000000000015100000000000011212 Lustar 
00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.pyastroid-2.0.1/astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodul0000644000076500000240000000001013324063433034512 0ustar claudiustaff00000000000000var = 42astroid-2.0.1/astroid/tests/testdata/python2/data/descriptor_crash.py0000644000076500000240000000033113324063433026572 0ustar claudiustaff00000000000000 import urllib class Page(object): _urlOpen = staticmethod(urllib.urlopen) def getPage(self, url): handle = self._urlOpen(url) data = handle.read() handle.close() return data astroid-2.0.1/astroid/tests/testdata/python2/data/email.py0000644000076500000240000000010613324063433024323 0ustar claudiustaff00000000000000"""fake email module to test absolute import doesn't grab this one""" astroid-2.0.1/astroid/tests/testdata/python2/data/find_test/0000755000076500000240000000000013324065077024652 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/find_test/__init__.py0000644000076500000240000000000013324063433026743 0ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/find_test/module.py0000644000076500000240000000000013324063433026471 0ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/find_test/module2.py0000644000076500000240000000000013324063433026553 0ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/find_test/noendingnewline.py0000644000076500000240000000000013324063433030367 0ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/find_test/nonregr.py0000644000076500000240000000000013324063433026656 0ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/foogle/0000755000076500000240000000000013324065077024146 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/foogle/fax/0000755000076500000240000000000013324065077024724 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/foogle/fax/__init__.py0000644000076500000240000000000013324063433027015 0ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/foogle/fax/a.py0000644000076500000240000000000513324063433025503 0ustar claudiustaff00000000000000x = 1astroid-2.0.1/astroid/tests/testdata/python2/data/foogle_fax-0.12.5-py2.7-nspkg.pth0000644000076500000240000000116513324063433030234 0ustar claudiustaff00000000000000import sys, types, os;p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('foogle',));ie = os.path.exists(os.path.join(p,'__init__.py'));m = not ie and sys.modules.setdefault('foogle', types.ModuleType('foogle'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) import sys, types, os;p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('foogle','crank'));ie = os.path.exists(os.path.join(p,'__init__.py'));m = not ie and sys.modules.setdefault('foogle.crank', types.ModuleType('foogle.crank'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) astroid-2.0.1/astroid/tests/testdata/python2/data/format.py0000644000076500000240000000064513324063433024534 0ustar claudiustaff00000000000000"""A multiline string """ function('aeozrijz\ earzer', hop) # XXX write test x = [i for i in range(5) if i % 4] fonction(1, 2, 3, 4) def definition(a, b, c): return a + b + c class debile(dict, object): pass if aaaa: pass else: aaaa,bbbb = 1,2 aaaa,bbbb = 
bbbb,aaaa # XXX write test hop = \ aaaa __revision__.lower(); astroid-2.0.1/astroid/tests/testdata/python2/data/invalid_encoding.py0000644000076500000240000000002613324063433026531 0ustar claudiustaff00000000000000# -*- coding: lala -*-astroid-2.0.1/astroid/tests/testdata/python2/data/lmfp/0000755000076500000240000000000013324065077023631 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/lmfp/__init__.py0000644000076500000240000000006313324063433025733 0ustar claudiustaff00000000000000# force a "direct" python import from . import foo astroid-2.0.1/astroid/tests/testdata/python2/data/lmfp/foo.py0000644000076500000240000000025213324063433024757 0ustar claudiustaff00000000000000import sys if not getattr(sys, 'bar', None): sys.just_once = [] # there used to be two numbers here because # of a load_module_from_path bug sys.just_once.append(42) astroid-2.0.1/astroid/tests/testdata/python2/data/module.py0000644000076500000240000000336713324063433024535 0ustar claudiustaff00000000000000"""test module for astroid """ __revision__ = '$Id: module.py,v 1.2 2005-11-02 11:56:54 syt Exp $' from astroid.node_classes import Name as NameNode from astroid import modutils from astroid.utils import * import os.path MY_DICT = {} def global_access(key, val): """function test""" local = 1 MY_DICT[key] = val for i in val: if i: del MY_DICT[i] continue else: break else: return local class YO: """hehe haha""" a = 1 def __init__(self): try: self.yo = 1 except ValueError, ex: pass except (NameError, TypeError): raise XXXError() except: raise class YOUPI(YO): class_attr = None def __init__(self): self.member = None def method(self): """method test""" global MY_DICT try: MY_DICT = {} local = None autre = [a for (a, b) in MY_DICT if b] if b in autre: return b elif a in autre: return a global_access(local, val=autre) finally: return local def static_method(): """static method test""" assert MY_DICT, '???' static_method = staticmethod(static_method) def class_method(cls): """class method test""" exec a in b class_method = classmethod(class_method) def four_args(a, b, c, d): """four arguments (was nested_args)""" pass while 1: if a: break a += +1 else: b += -2 if c: d = a and (b or c) else: c = a and b or d map(lambda x, y: (y, x), a) redirect = four_args astroid-2.0.1/astroid/tests/testdata/python2/data/module1abs/0000755000076500000240000000000013324065077024727 5ustar claudiustaff00000000000000astroid-2.0.1/astroid/tests/testdata/python2/data/module1abs/__init__.py0000644000076500000240000000016113324063433027030 0ustar claudiustaff00000000000000from __future__ import absolute_import, print_function from . 
import core from .core import * print(sys.version) astroid-2.0.1/astroid/tests/testdata/python2/data/module1abs/core.py0000644000076500000240000000001313324063433026215 0ustar claudiustaff00000000000000import sys astroid-2.0.1/astroid/tests/testdata/python2/data/module2.py0000644000076500000240000000354513324063433024615 0ustar claudiustaff00000000000000from data.module import YO, YOUPI import data class Specialization(YOUPI, YO): pass class Metaclass(type): pass class Interface: pass class MyIFace(Interface): pass class AnotherIFace(Interface): pass class MyException(Exception): pass class MyError(MyException): pass class AbstractClass(object): def to_override(self, whatever): raise NotImplementedError() def return_something(self, param): if param: return 'toto' return class Concrete0: __implements__ = MyIFace class Concrete1: __implements__ = (MyIFace, AnotherIFace) class Concrete2: __implements__ = (MyIFace, AnotherIFace) class Concrete23(Concrete1): pass del YO.member del YO [SYN1, SYN2] = (Concrete0, Concrete1) assert '1' b = (1 | 2) & (3 ^ 8) bb = 1 | (two | 6) ccc = one & two & three dddd = x ^ (o ^ r) exec 'c = 3' exec 'c = 3' in {}, {} def raise_string(a=2, *args, **kwargs): raise Exception, 'yo' yield 'coucou' yield a = b + 2 c = b * 2 c = b / 2 c = b // 2 c = b - 2 c = b % 2 c = b**2 c = b << 2 c = b >> 2 c = ~b c = not b d = [c] e = d[:] e = d[a:b:c] raise_string(*args, **kwargs) print >> stream, 'bonjour' print >> stream, 'salut', def make_class(any, base=data.module.YO, *args, **kwargs): """check base is correctly resolved to Concrete0""" class Aaaa(base): """dynamic class""" return Aaaa from os.path import abspath import os as myos class A: pass class A(A): pass def generator(): """A generator.""" yield def not_a_generator(): """A function that contains generator, but is not one.""" def generator(): yield genl = lambda: (yield) def with_metaclass(meta, *bases): return meta('NewBase', bases, {}) class NotMetaclass(with_metaclass(Metaclass)): pass astroid-2.0.1/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg0000644000076500000240000000230613324063433026036 0ustar claudiustaff00000000000000PKAy:5\Bmypypa/__init__.pyc+Id(`b .) Q@!X $ $ 4@JD||YjQqf~^|H;(I9)I)%z9yffy%z@6)9v APKAy:\sjmypypa/__init__.py/K-*ϋWUP73TPKAy:K[EGG-INFO/SOURCES.txt+N-)-+ӏ,|+*Ru3u=1$C]J*J0RR RRR+s2󲋱**/I-KPKAy:#z| EGG-INFO/top_level.txt˭,,HPKAy:2EGG-INFO/dependency_links.txtPKAy:2EGG-INFO/zip-safePKAy:SA4~EGG-INFO/PKG-INFOM-ILI,I K-*ϳR03KMR HKss*RK22ҹ> 2 c = ~b c = not b d = [c] e = d[:] e = d[a:b:c] raise_string(*args, **kwargs) print('bonjour', file=stream) print('salut', end=' ', file=stream) def make_class(any, base=data.module.YO, *args, **kwargs): """check base is correctly resolved to Concrete0""" class Aaaa(base): """dynamic class""" return Aaaa from os.path import abspath import os as myos class A: pass class A(A): pass def generator(): """A generator.""" yield def not_a_generator(): """A function that contains generator, but is not one.""" def generator(): yield genl = lambda: (yield) def with_metaclass(meta, *bases): return meta('NewBase', bases, {}) class NotMetaclass(with_metaclass(Metaclass)): pass astroid-2.0.1/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg0000644000076500000240000000230613324063433026037 0ustar claudiustaff00000000000000PKAy:5\Bmypypa/__init__.pyc+Id(`b .) 
Q@!X $ $ 4@JD||YjQqf~^|H;(I9)I)%z9yffy%z@6)9v APKAy:\sjmypypa/__init__.py/K-*ϋWUP73TPKAy:K[EGG-INFO/SOURCES.txt+N-)-+ӏ,|+*Ru3u=1$C]J*J0RR RRR+s2󲋱**/I-KPKAy:#z| EGG-INFO/top_level.txt˭,,HPKAy:2EGG-INFO/dependency_links.txtPKAy:2EGG-INFO/zip-safePKAy:SA4~EGG-INFO/PKG-INFOM-ILI,I K-*ϳR03KMR HKss*RK22ҹ # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2015 raylu # Copyright (c) 2015 Philip Lorenz # Copyright (c) 2016 Florian Bruhin # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017-2018 Bryce Guinta # Copyright (c) 2017 hippo91 # Copyright (c) 2017 David Euresti # Copyright (c) 2017 Derek Gustafson # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 Anthony Sottile # Copyright (c) 2018 Ioana Tagirta # Copyright (c) 2018 Ahmed Azzaoui # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Tests for basic functionality in astroid.brain.""" import queue try: import multiprocessing # pylint: disable=unused-import HAS_MULTIPROCESSING = True except ImportError: HAS_MULTIPROCESSING = False import sys import unittest try: import enum # pylint: disable=unused-import HAS_ENUM = True except ImportError: try: import enum34 as enum # pylint: disable=unused-import HAS_ENUM = True except ImportError: HAS_ENUM = False try: import nose # pylint: disable=unused-import HAS_NOSE = True except ImportError: HAS_NOSE = False try: import dateutil # pylint: disable=unused-import HAS_DATEUTIL = True except ImportError: HAS_DATEUTIL = False import pytest HAS_PYTEST = True try: import attr as attr_module # pylint: disable=unused-import HAS_ATTR = True except ImportError: HAS_ATTR = False from astroid import MANAGER from astroid import bases from astroid import builder from astroid import nodes from astroid import util from astroid import test_utils import astroid class HashlibTest(unittest.TestCase): def _assert_hashlib_class(self, class_obj): self.assertIn('update', class_obj) self.assertIn('digest', class_obj) self.assertIn('hexdigest', class_obj) self.assertIn('block_size', class_obj) self.assertIn('digest_size', class_obj) self.assertEqual(len(class_obj['__init__'].args.args), 2) self.assertEqual(len(class_obj['__init__'].args.defaults), 1) self.assertEqual(len(class_obj['update'].args.args), 2) self.assertEqual(len(class_obj['digest'].args.args), 1) self.assertEqual(len(class_obj['hexdigest'].args.args), 1) def test_hashlib(self): """Tests that brain extensions for hashlib work.""" hashlib_module = MANAGER.ast_from_module_name('hashlib') for class_name in ['md5', 'sha1']: class_obj = hashlib_module[class_name] self._assert_hashlib_class(class_obj) @test_utils.require_version(minver='3.6') def test_hashlib_py36(self): hashlib_module = MANAGER.ast_from_module_name('hashlib') for class_name in ['sha3_224', 'sha3_512', 'shake_128']: class_obj = hashlib_module[class_name] self._assert_hashlib_class(class_obj) class CollectionsDequeTests(unittest.TestCase): def _inferred_queue_instance(self): node = builder.extract_node(""" import collections q = collections.deque([]) q """) return next(node.infer()) def test_deque(self): inferred = self._inferred_queue_instance() self.assertTrue(inferred.getattr('__len__')) @test_utils.require_version(minver='3.5') def test_deque_py35methods(self): inferred = self._inferred_queue_instance() self.assertIn('copy', inferred.locals) self.assertIn('insert', inferred.locals) self.assertIn('index', inferred.locals) class 
OrderedDictTest(unittest.TestCase): def _inferred_ordered_dict_instance(self): node = builder.extract_node(""" import collections d = collections.OrderedDict() d """) return next(node.infer()) @test_utils.require_version(minver='3.4') def test_ordered_dict_py34method(self): inferred = self._inferred_ordered_dict_instance() self.assertIn('move_to_end', inferred.locals) class NamedTupleTest(unittest.TestCase): def test_namedtuple_base(self): klass = builder.extract_node(""" from collections import namedtuple class X(namedtuple("X", ["a", "b", "c"])): pass """) self.assertEqual( [anc.name for anc in klass.ancestors()], ['X', 'tuple', 'object']) for anc in klass.ancestors(): self.assertFalse(anc.parent is None) def test_namedtuple_inference(self): klass = builder.extract_node(""" from collections import namedtuple name = "X" fields = ["a", "b", "c"] class X(namedtuple(name, fields)): pass """) base = next(base for base in klass.ancestors() if base.name == 'X') self.assertSetEqual({"a", "b", "c"}, set(base.instance_attrs)) def test_namedtuple_inference_failure(self): klass = builder.extract_node(""" from collections import namedtuple def foo(fields): return __(namedtuple("foo", fields)) """) self.assertIs(util.Uninferable, next(klass.infer())) def test_namedtuple_advanced_inference(self): # urlparse return an object of class ParseResult, which has a # namedtuple call and a mixin as base classes result = builder.extract_node(""" import six result = __(six.moves.urllib.parse.urlparse('gopher://')) """) instance = next(result.infer()) self.assertGreaterEqual(len(instance.getattr('scheme')), 1) self.assertGreaterEqual(len(instance.getattr('port')), 1) with self.assertRaises(astroid.AttributeInferenceError): instance.getattr('foo') self.assertGreaterEqual(len(instance.getattr('geturl')), 1) self.assertEqual(instance.name, 'ParseResult') def test_namedtuple_instance_attrs(self): result = builder.extract_node(''' from collections import namedtuple namedtuple('a', 'a b c')(1, 2, 3) #@ ''') inferred = next(result.infer()) for name, attr in inferred.instance_attrs.items(): self.assertEqual(attr[0].attrname, name) def test_namedtuple_uninferable_fields(self): node = builder.extract_node(''' x = [A] * 2 from collections import namedtuple l = namedtuple('a', x) l(1) ''') inferred = next(node.infer()) self.assertIs(util.Uninferable, inferred) def test_namedtuple_access_class_fields(self): node = builder.extract_node(""" from collections import namedtuple Tuple = namedtuple("Tuple", "field other") Tuple #@ """) inferred = next(node.infer()) self.assertIn('field', inferred.locals) self.assertIn('other', inferred.locals) def test_namedtuple_rename_keywords(self): node = builder.extract_node(""" from collections import namedtuple Tuple = namedtuple("Tuple", "abc def", rename=True) Tuple #@ """) inferred = next(node.infer()) self.assertIn('abc', inferred.locals) self.assertIn('_1', inferred.locals) def test_namedtuple_rename_duplicates(self): node = builder.extract_node(""" from collections import namedtuple Tuple = namedtuple("Tuple", "abc abc abc", rename=True) Tuple #@ """) inferred = next(node.infer()) self.assertIn('abc', inferred.locals) self.assertIn('_1', inferred.locals) self.assertIn('_2', inferred.locals) def test_namedtuple_rename_uninferable(self): node = builder.extract_node(""" from collections import namedtuple Tuple = namedtuple("Tuple", "a b c", rename=UNINFERABLE) Tuple #@ """) inferred = next(node.infer()) self.assertIn('a', inferred.locals) self.assertIn('b', inferred.locals) 
self.assertIn('c', inferred.locals) def test_namedtuple_func_form(self): node = builder.extract_node(""" from collections import namedtuple Tuple = namedtuple(typename="Tuple", field_names="a b c", rename=UNINFERABLE) Tuple #@ """) inferred = next(node.infer()) self.assertEqual(inferred.name, 'Tuple') self.assertIn('a', inferred.locals) self.assertIn('b', inferred.locals) self.assertIn('c', inferred.locals) def test_namedtuple_func_form_args_and_kwargs(self): node = builder.extract_node(""" from collections import namedtuple Tuple = namedtuple("Tuple", field_names="a b c", rename=UNINFERABLE) Tuple #@ """) inferred = next(node.infer()) self.assertEqual(inferred.name, 'Tuple') self.assertIn('a', inferred.locals) self.assertIn('b', inferred.locals) self.assertIn('c', inferred.locals) def test_namedtuple_bases_are_actually_names_not_nodes(self): node = builder.extract_node(""" from collections import namedtuple Tuple = namedtuple("Tuple", field_names="a b c", rename=UNINFERABLE) Tuple #@ """) inferred = next(node.infer()) self.assertIsInstance(inferred, astroid.ClassDef) self.assertIsInstance(inferred.bases[0], astroid.Name) self.assertEqual(inferred.bases[0].name, 'tuple') class DefaultDictTest(unittest.TestCase): def test_1(self): node = builder.extract_node(''' from collections import defaultdict X = defaultdict(int) X[0] ''') inferred = next(node.infer()) self.assertIs(util.Uninferable, inferred) class ModuleExtenderTest(unittest.TestCase): def testExtensionModules(self): transformer = MANAGER._transform for extender, _ in transformer.transforms[nodes.Module]: n = nodes.Module('__main__', None) extender(n) @unittest.skipUnless(HAS_NOSE, "This test requires nose library.") class NoseBrainTest(unittest.TestCase): def test_nose_tools(self): methods = builder.extract_node(""" from nose.tools import assert_equal from nose.tools import assert_equals from nose.tools import assert_true assert_equal = assert_equal #@ assert_true = assert_true #@ assert_equals = assert_equals #@ """) assert_equal = next(methods[0].value.infer()) assert_true = next(methods[1].value.infer()) assert_equals = next(methods[2].value.infer()) self.assertIsInstance(assert_equal, astroid.BoundMethod) self.assertIsInstance(assert_true, astroid.BoundMethod) self.assertIsInstance(assert_equals, astroid.BoundMethod) self.assertEqual(assert_equal.qname(), 'unittest.case.TestCase.assertEqual') self.assertEqual(assert_true.qname(), 'unittest.case.TestCase.assertTrue') self.assertEqual(assert_equals.qname(), 'unittest.case.TestCase.assertEqual') class SixBrainTest(unittest.TestCase): def test_attribute_access(self): ast_nodes = builder.extract_node(''' import six six.moves.http_client #@ six.moves.urllib_parse #@ six.moves.urllib_error #@ six.moves.urllib.request #@ ''') http_client = next(ast_nodes[0].infer()) self.assertIsInstance(http_client, nodes.Module) self.assertEqual(http_client.name, 'http.client') urllib_parse = next(ast_nodes[1].infer()) self.assertIsInstance(urllib_parse, nodes.Module) self.assertEqual(urllib_parse.name, 'urllib.parse') urljoin = next(urllib_parse.igetattr('urljoin')) urlencode = next(urllib_parse.igetattr('urlencode')) self.assertIsInstance(urljoin, nodes.FunctionDef) self.assertEqual(urljoin.qname(), 'urllib.parse.urljoin') self.assertIsInstance(urlencode, nodes.FunctionDef) self.assertEqual(urlencode.qname(), 'urllib.parse.urlencode') urllib_error = next(ast_nodes[2].infer()) self.assertIsInstance(urllib_error, nodes.Module) self.assertEqual(urllib_error.name, 'urllib.error') urlerror = 
next(urllib_error.igetattr('URLError')) self.assertIsInstance(urlerror, nodes.ClassDef) content_too_short = next(urllib_error.igetattr('ContentTooShortError')) self.assertIsInstance(content_too_short, nodes.ClassDef) urllib_request = next(ast_nodes[3].infer()) self.assertIsInstance(urllib_request, nodes.Module) self.assertEqual(urllib_request.name, 'urllib.request') urlopen = next(urllib_request.igetattr('urlopen')) urlretrieve = next(urllib_request.igetattr('urlretrieve')) self.assertIsInstance(urlopen, nodes.FunctionDef) self.assertEqual(urlopen.qname(), 'urllib.request.urlopen') self.assertIsInstance(urlretrieve, nodes.FunctionDef) self.assertEqual(urlretrieve.qname(), 'urllib.request.urlretrieve') def test_from_imports(self): ast_node = builder.extract_node(''' from six.moves import http_client http_client.HTTPSConnection #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) qname = 'http.client.HTTPSConnection' self.assertEqual(inferred.qname(), qname) def test_from_submodule_imports(self): """Make sure ulrlib submodules can be imported from See PyCQA/pylint#1640 for relevant issue """ ast_node = builder.extract_node(''' from six.moves.urllib.parse import urlparse urlparse #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.FunctionDef) @unittest.skipUnless(HAS_MULTIPROCESSING, 'multiprocesing is required for this test, but ' 'on some platforms it is missing ' '(Jython for instance)') class MultiprocessingBrainTest(unittest.TestCase): def test_multiprocessing_module_attributes(self): # Test that module attributes are working, # especially on Python 3.4+, where they are obtained # from a context. module = builder.extract_node(""" import multiprocessing """) module = module.do_import_module('multiprocessing') cpu_count = next(module.igetattr('cpu_count')) if sys.version_info < (3, 4): self.assertIsInstance(cpu_count, nodes.FunctionDef) else: self.assertIsInstance(cpu_count, astroid.BoundMethod) def test_module_name(self): module = builder.extract_node(""" import multiprocessing multiprocessing.SyncManager() """) inferred_sync_mgr = next(module.infer()) module = inferred_sync_mgr.root() self.assertEqual(module.name, 'multiprocessing.managers') def test_multiprocessing_manager(self): # Test that we have the proper attributes # for a multiprocessing.managers.SyncManager module = builder.parse(""" import multiprocessing manager = multiprocessing.Manager() queue = manager.Queue() joinable_queue = manager.JoinableQueue() event = manager.Event() rlock = manager.RLock() bounded_semaphore = manager.BoundedSemaphore() condition = manager.Condition() barrier = manager.Barrier() pool = manager.Pool() list = manager.list() dict = manager.dict() value = manager.Value() array = manager.Array() namespace = manager.Namespace() """) ast_queue = next(module['queue'].infer()) self.assertEqual(ast_queue.qname(), "{}.Queue".format(queue.__name__)) joinable_queue = next(module['joinable_queue'].infer()) self.assertEqual(joinable_queue.qname(), "{}.Queue".format(queue.__name__)) event = next(module['event'].infer()) event_name = "threading.Event" self.assertEqual(event.qname(), event_name) rlock = next(module['rlock'].infer()) rlock_name = "threading._RLock" self.assertEqual(rlock.qname(), rlock_name) bounded_semaphore = next(module['bounded_semaphore'].infer()) semaphore_name = "threading.BoundedSemaphore" self.assertEqual(bounded_semaphore.qname(), semaphore_name) pool = next(module['pool'].infer()) pool_name = "multiprocessing.pool.Pool" 
self.assertEqual(pool.qname(), pool_name) for attr in ('list', 'dict'): obj = next(module[attr].infer()) self.assertEqual(obj.qname(), "{}.{}".format(bases.BUILTINS, attr)) array = next(module['array'].infer()) self.assertEqual(array.qname(), "array.array") manager = next(module['manager'].infer()) # Verify that we have these attributes self.assertTrue(manager.getattr('start')) self.assertTrue(manager.getattr('shutdown')) class ThreadingBrainTest(unittest.TestCase): def test_lock(self): self._test_lock_object('Lock') def test_rlock(self): self._test_lock_object('RLock') def test_semaphore(self): self._test_lock_object('Semaphore') def test_boundedsemaphore(self): self._test_lock_object('BoundedSemaphore') def _test_lock_object(self, object_name): lock_instance = builder.extract_node(""" import threading threading.{}() """.format(object_name)) inferred = next(lock_instance.infer()) self.assert_is_valid_lock(inferred) def assert_is_valid_lock(self, inferred): self.assertIsInstance(inferred, astroid.Instance) self.assertEqual(inferred.root().name, 'threading') for method in {'acquire', 'release', '__enter__', '__exit__'}: self.assertIsInstance(next(inferred.igetattr(method)), astroid.BoundMethod) @unittest.skipUnless(HAS_ENUM, 'The enum module was only added in Python 3.4. Support for ' 'older Python versions may be available through the enum34 ' 'compatibility module.') class EnumBrainTest(unittest.TestCase): def test_simple_enum(self): module = builder.parse(""" import enum class MyEnum(enum.Enum): one = "one" two = "two" def mymethod(self, x): return 5 """) enumeration = next(module['MyEnum'].infer()) one = enumeration['one'] self.assertEqual(one.pytype(), '.MyEnum.one') property_type = '{}.property'.format(bases.BUILTINS) for propname in ('name', 'value'): prop = next(iter(one.getattr(propname))) self.assertIn(property_type, prop.decoratornames()) meth = one.getattr('mymethod')[0] self.assertIsInstance(meth, astroid.FunctionDef) def test_looks_like_enum_false_positive(self): # Test that a class named Enumeration is not considered a builtin enum. 
module = builder.parse(''' class Enumeration(object): def __init__(self, name, enum_list): pass test = 42 ''') enumeration = module['Enumeration'] test = next(enumeration.igetattr('test')) self.assertEqual(test.value, 42) def test_enum_multiple_base_classes(self): module = builder.parse(""" import enum class Mixin: pass class MyEnum(Mixin, enum.Enum): one = 1 """) enumeration = next(module['MyEnum'].infer()) one = enumeration['one'] clazz = one.getattr('__class__')[0] self.assertTrue(clazz.is_subtype_of('.Mixin'), 'Enum instance should share base classes with generating class') def test_int_enum(self): module = builder.parse(""" import enum class MyEnum(enum.IntEnum): one = 1 """) enumeration = next(module['MyEnum'].infer()) one = enumeration['one'] clazz = one.getattr('__class__')[0] int_type = '{}.{}'.format(bases.BUILTINS, 'int') self.assertTrue(clazz.is_subtype_of(int_type), 'IntEnum based enums should be a subtype of int') def test_enum_func_form_is_class_not_instance(self): cls, instance = builder.extract_node(''' from enum import Enum f = Enum('Audience', ['a', 'b', 'c']) f #@ f(1) #@ ''') inferred_cls = next(cls.infer()) self.assertIsInstance(inferred_cls, bases.Instance) inferred_instance = next(instance.infer()) self.assertIsInstance(inferred_instance, bases.Instance) self.assertIsInstance(next(inferred_instance.igetattr('name')), nodes.Const) self.assertIsInstance(next(inferred_instance.igetattr('value')), nodes.Const) def test_enum_func_form_iterable(self): instance = builder.extract_node(''' from enum import Enum Animal = Enum('Animal', 'ant bee cat dog') Animal ''') inferred = next(instance.infer()) self.assertIsInstance(inferred, astroid.Instance) self.assertTrue(inferred.getattr('__iter__')) def test_enum_func_form_subscriptable(self): instance, name = builder.extract_node(''' from enum import Enum Animal = Enum('Animal', 'ant bee cat dog') Animal['ant'] #@ Animal['ant'].name #@ ''') instance = next(instance.infer()) self.assertIsInstance(instance, astroid.Instance) inferred = next(name.infer()) self.assertIsInstance(inferred, astroid.Const) def test_enum_func_form_has_dunder_members(self): instance = builder.extract_node(''' from enum import Enum Animal = Enum('Animal', 'ant bee cat dog') for i in Animal.__members__: i #@ ''') instance = next(instance.infer()) self.assertIsInstance(instance, astroid.Const) self.assertIsInstance(instance.value, str) def test_infer_enum_value_as_the_right_type(self): string_value, int_value = builder.extract_node(''' from enum import Enum class A(Enum): a = 'a' b = 1 A.a.value #@ A.b.value #@ ''') inferred_string = string_value.inferred() assert any(isinstance(elem, astroid.Const) and elem.value == 'a' for elem in inferred_string) inferred_int = int_value.inferred() assert any(isinstance(elem, astroid.Const) and elem.value == 1 for elem in inferred_int) @unittest.skipUnless(HAS_DATEUTIL, "This test requires the dateutil library.") class DateutilBrainTest(unittest.TestCase): def test_parser(self): module = builder.parse(""" from dateutil.parser import parse d = parse('2000-01-01') """) d_type = next(module['d'].infer()) self.assertEqual(d_type.qname(), "datetime.datetime") @unittest.skipUnless(HAS_PYTEST, "This test requires the pytest library.") class PytestBrainTest(unittest.TestCase): def test_pytest(self): ast_node = builder.extract_node(''' import pytest pytest #@ ''') module = next(ast_node.infer()) attrs = ['deprecated_call', 'warns', 'exit', 'fail', 'skip', 'importorskip', 'xfail', 'mark', 'raises', 'freeze_includes', 'set_trace', 
'fixture', 'yield_fixture'] if pytest.__version__.split('.')[0] == '3': attrs += ['approx', 'register_assert_rewrite'] for attr in attrs: self.assertIn(attr, module) def streams_are_fine(): """Check if streams are being overwritten, for example, by pytest stream inference will not work if they are overwritten PY3 only """ import io for stream in (sys.stdout, sys.stderr, sys.stdin): if not isinstance(stream, io.IOBase): return False return True class IOBrainTest(unittest.TestCase): @unittest.skipUnless( streams_are_fine(), "Needs Python 3 io model / doesn't work with plain pytest." "use pytest -s for this test to work") def test_sys_streams(self): for name in {'stdout', 'stderr', 'stdin'}: node = astroid.extract_node(''' import sys sys.{} '''.format(name)) inferred = next(node.infer()) buffer_attr = next(inferred.igetattr('buffer')) self.assertIsInstance(buffer_attr, astroid.Instance) self.assertEqual(buffer_attr.name, 'BufferedWriter') raw = next(buffer_attr.igetattr('raw')) self.assertIsInstance(raw, astroid.Instance) self.assertEqual(raw.name, 'FileIO') @test_utils.require_version('3.6') class TypingBrain(unittest.TestCase): def test_namedtuple_base(self): klass = builder.extract_node(""" from typing import NamedTuple class X(NamedTuple("X", [("a", int), ("b", str), ("c", bytes)])): pass """) self.assertEqual( [anc.name for anc in klass.ancestors()], ['X', 'tuple', 'object']) for anc in klass.ancestors(): self.assertFalse(anc.parent is None) def test_namedtuple_can_correcty_access_methods(self): klass, called = builder.extract_node(""" from typing import NamedTuple class X(NamedTuple): #@ a: int b: int def as_string(self): return '%s' % self.a def as_integer(self): return 2 + 3 X().as_integer() #@ """) self.assertEqual(len(klass.getattr('as_string')), 1) inferred = next(called.infer()) self.assertIsInstance(inferred, astroid.Const) self.assertEqual(inferred.value, 5) def test_namedtuple_inference(self): klass = builder.extract_node(""" from typing import NamedTuple class X(NamedTuple("X", [("a", int), ("b", str), ("c", bytes)])): pass """) base = next(base for base in klass.ancestors() if base.name == 'X') self.assertSetEqual({"a", "b", "c"}, set(base.instance_attrs)) def test_namedtuple_inference_nonliteral(self): # Note: NamedTuples in mypy only work with literals. 
klass = builder.extract_node(""" from typing import NamedTuple name = "X" fields = [("a", int), ("b", str), ("c", bytes)] NamedTuple(name, fields) """) inferred = next(klass.infer()) self.assertIsInstance(inferred, astroid.Instance) self.assertEqual(inferred.qname(), "typing.NamedTuple") def test_namedtuple_instance_attrs(self): result = builder.extract_node(''' from typing import NamedTuple NamedTuple("A", [("a", int), ("b", str), ("c", bytes)])(1, 2, 3) #@ ''') inferred = next(result.infer()) for name, attr in inferred.instance_attrs.items(): self.assertEqual(attr[0].attrname, name) def test_namedtuple_simple(self): result = builder.extract_node(''' from typing import NamedTuple NamedTuple("A", [("a", int), ("b", str), ("c", bytes)]) ''') inferred = next(result.infer()) self.assertIsInstance(inferred, nodes.ClassDef) self.assertSetEqual({"a", "b", "c"}, set(inferred.instance_attrs)) def test_namedtuple_few_args(self): result = builder.extract_node(''' from typing import NamedTuple NamedTuple("A") ''') inferred = next(result.infer()) self.assertIsInstance(inferred, astroid.Instance) self.assertEqual(inferred.qname(), "typing.NamedTuple") def test_namedtuple_few_fields(self): result = builder.extract_node(''' from typing import NamedTuple NamedTuple("A", [("a",), ("b", str), ("c", bytes)]) ''') inferred = next(result.infer()) self.assertIsInstance(inferred, astroid.Instance) self.assertEqual(inferred.qname(), "typing.NamedTuple") def test_namedtuple_class_form(self): result = builder.extract_node(''' from typing import NamedTuple class Example(NamedTuple): mything: int Example(mything=1) ''') inferred = next(result.infer()) self.assertIsInstance(inferred, astroid.Instance) def test_typing_types(self): ast_nodes = builder.extract_node(""" from typing import TypeVar, Iterable, Tuple, NewType, Dict, Union TypeVar('MyTypeVar', int, float, complex) #@ Iterable[Tuple[MyTypeVar, MyTypeVar]] #@ TypeVar('AnyStr', str, bytes) #@ NewType('UserId', str) #@ Dict[str, str] #@ Union[int, str] #@ """) for node in ast_nodes: inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.ClassDef, node.as_string()) class ReBrainTest(unittest.TestCase): def test_regex_flags(self): import re names = [name for name in dir(re) if name.isupper()] re_ast = MANAGER.ast_from_module_name('re') for name in names: self.assertIn(name, re_ast) self.assertEqual(next(re_ast[name].infer()).value, getattr(re, name)) @test_utils.require_version('3.6') class BrainFStrings(unittest.TestCase): def test_no_crash_on_const_reconstruction(self): node = builder.extract_node(''' max_width = 10 test1 = f'{" ":{max_width+4}}' print(f'"{test1}"') test2 = f'[{"7":>{max_width}}:0]' test2 ''') inferred = next(node.infer()) self.assertIs(inferred, util.Uninferable) @test_utils.require_version('3.6') class BrainNamedtupleAnnAssignTest(unittest.TestCase): def test_no_crash_on_ann_assign_in_namedtuple(self): node = builder.extract_node(''' from enum import Enum from typing import Optional class A(Enum): B: str = 'B' ''') inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) class BrainUUIDTest(unittest.TestCase): def test_uuid_has_int_member(self): node = builder.extract_node(''' import uuid u = uuid.UUID('{12345678-1234-5678-1234-567812345678}') u.int ''') inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) @unittest.skipUnless(HAS_ATTR, "These tests require the attr library") class AttrsTest(unittest.TestCase): def test_attr_transform(self): module = astroid.parse(""" import attr @attr.s 
class Foo: d = attr.ib(attr.Factory(dict)) f = Foo() f.d['answer'] = 42 @attr.s(slots=True) class Bar: d = attr.ib(attr.Factory(dict)) g = Bar() g.d['answer'] = 42 """) for name in ('f', 'g'): should_be_unknown = next(module.getattr(name)[0].infer()).getattr('d')[0] self.assertIsInstance(should_be_unknown, astroid.Unknown) def test_special_attributes(self): """Make sure special attrs attributes exist""" code = """ import attr @attr.s class Foo: pass Foo() """ foo_inst = next(astroid.extract_node(code).infer()) [attr_node] = foo_inst.getattr("__attrs_attrs__") # Prevents https://github.com/PyCQA/pylint/issues/1884 assert isinstance(attr_node, nodes.Unknown) def test_dont_consider_assignments_but_without_attrs(self): code = ''' import attr class Cls: pass @attr.s class Foo: temp = Cls() temp.prop = 5 bar_thing = attr.ib(default=temp) Foo() ''' next(astroid.extract_node(code).infer()) class RandomSampleTest(unittest.TestCase): def test_inferred_successfully(self): node = astroid.extract_node(''' import random random.sample([1, 2], 2) #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, astroid.List) elems = sorted(elem.value for elem in inferred.elts) self.assertEqual(elems, [1, 2]) class SubprocessTest(unittest.TestCase): """Test subprocess brain""" @unittest.skipIf(sys.version_info < (3, 3), reason="Python 2.7 subprocess doesnt have args") def test_subprocess_args(self): """Make sure the args attribute exists for Popen Test for https://github.com/PyCQA/pylint/issues/1860""" name = astroid.extract_node(""" import subprocess p = subprocess.Popen(['ls']) p #@ """) [inst] = name.inferred() self.assertIsInstance(next(inst.igetattr("args")), nodes.List) class TestIsinstanceInference: """Test isinstance builtin inference""" def test_type_type(self): assert _get_result("isinstance(type, type)") == "True" def test_object_type(self): assert _get_result("isinstance(object, type)") == "True" def test_type_object(self): assert _get_result("isinstance(type, object)") == "True" def test_isinstance_int_true(self): """Make sure isinstance can check builtin int types""" assert _get_result("isinstance(1, int)") == "True" def test_isinstance_int_false(self): assert _get_result("isinstance('a', int)") == "False" def test_isinstance_object_true(self): assert _get_result(""" class Bar(object): pass isinstance(Bar(), object) """) == "True" def test_isinstance_object_true3(self): assert _get_result(""" class Bar(object): pass isinstance(Bar(), Bar) """) == "True" def test_isinstance_class_false(self): assert _get_result(""" class Foo(object): pass class Bar(object): pass isinstance(Bar(), Foo) """) == "False" def test_isinstance_type_false(self): assert _get_result(""" class Bar(object): pass isinstance(Bar(), type) """) == "False" def test_isinstance_str_true(self): """Make sure isinstance can check bultin str types""" assert _get_result("isinstance('a', str)") == "True" def test_isinstance_str_false(self): assert _get_result("isinstance(1, str)") == "False" def test_isinstance_tuple_argument(self): """obj just has to be an instance of ANY class/type on the right""" assert _get_result("isinstance(1, (str, int))") == "True" def test_isinstance_type_false2(self): assert _get_result(""" isinstance(1, type) """) == "False" def test_isinstance_object_true2(self): assert _get_result(""" class Bar(type): pass mainbar = Bar("Bar", tuple(), {}) isinstance(mainbar, object) """) == "True" def test_isinstance_type_true(self): assert _get_result(""" class Bar(type): pass mainbar = Bar("Bar", tuple(), {}) 
isinstance(mainbar, type) """) == "True" def test_isinstance_edge_case(self): """isinstance allows bad type short-circuting""" assert _get_result("isinstance(1, (int, 1))") == "True" def test_uninferable_bad_type(self): """The second argument must be a class or a tuple of classes""" with pytest.raises(astroid.InferenceError): _get_result_node("isinstance(int, 1)") def test_uninferable_keywords(self): """isinstance does not allow keywords""" with pytest.raises(astroid.InferenceError): _get_result_node("isinstance(1, class_or_tuple=int)") def test_too_many_args(self): """isinstance must have two arguments""" with pytest.raises(astroid.InferenceError): _get_result_node("isinstance(1, int, str)") def test_first_param_is_uninferable(self): with pytest.raises(astroid.InferenceError): _get_result_node('isinstance(something, int)') class TestIssubclassBrain: """Test issubclass() builtin inference""" def test_type_type(self): assert _get_result("issubclass(type, type)") == "True" def test_object_type(self): assert _get_result("issubclass(object, type)") == "False" def test_type_object(self): assert _get_result("issubclass(type, object)") == "True" def test_issubclass_same_class(self): assert _get_result("issubclass(int, int)") == "True" def test_issubclass_not_the_same_class(self): assert _get_result("issubclass(str, int)") == "False" def test_issubclass_object_true(self): assert _get_result(""" class Bar(object): pass issubclass(Bar, object) """) == "True" def test_issubclass_same_user_defined_class(self): assert _get_result(""" class Bar(object): pass issubclass(Bar, Bar) """) == "True" def test_issubclass_different_user_defined_classes(self): assert _get_result(""" class Foo(object): pass class Bar(object): pass issubclass(Bar, Foo) """) == "False" def test_issubclass_type_false(self): assert _get_result(""" class Bar(object): pass issubclass(Bar, type) """) == "False" def test_isinstance_tuple_argument(self): """obj just has to be a subclass of ANY class/type on the right""" assert _get_result("issubclass(int, (str, int))") == "True" def test_isinstance_object_true2(self): assert _get_result(""" class Bar(type): pass issubclass(Bar, object) """) == "True" def test_issubclass_short_circuit(self): """issubclasss allows bad type short-circuting""" assert _get_result("issubclass(int, (int, 1))") == "True" def test_uninferable_bad_type(self): """The second argument must be a class or a tuple of classes""" # Should I subclass with pytest.raises(astroid.InferenceError): _get_result_node("issubclass(int, 1)") def test_uninferable_keywords(self): """issubclass does not allow keywords""" with pytest.raises(astroid.InferenceError): _get_result_node("issubclass(int, class_or_tuple=int)") def test_too_many_args(self): """issubclass must have two arguments""" with pytest.raises(astroid.InferenceError): _get_result_node("issubclass(int, int, str)") def _get_result_node(code): node = next(astroid.extract_node(code).infer()) return node def _get_result(code): return _get_result_node(code).as_string() class TestLenBuiltinInference: def test_len_list(self): # Uses .elts node = astroid.extract_node(""" len(['a','b','c']) """) node = next(node.infer()) assert node.as_string() == '3' assert isinstance(node, nodes.Const) def test_len_tuple(self): node = astroid.extract_node(""" len(('a','b','c')) """) node = next(node.infer()) assert node.as_string() == '3' def test_len_var(self): # Make sure argument is inferred node = astroid.extract_node(""" a = [1,2,'a','b','c'] len(a) """) node = next(node.infer()) assert 
node.as_string() == '5' def test_len_dict(self): # Uses .items node = astroid.extract_node(""" a = {'a': 1, 'b': 2} len(a) """) node = next(node.infer()) assert node.as_string() == '2' def test_len_set(self): node = astroid.extract_node(""" len({'a'}) """) inferred_node = next(node.infer()) assert inferred_node.as_string() == '1' def test_len_object(self): """Test len with objects that implement the len protocol""" node = astroid.extract_node(""" class A: def __len__(self): return 57 len(A()) """) inferred_node = next(node.infer()) assert inferred_node.as_string() == '57' def test_len_class_with_metaclass(self): """Make sure proper len method is located""" cls_node, inst_node = astroid.extract_node(""" class F2(type): def __new__(cls, name, bases, attrs): return super().__new__(cls, name, bases, {}) def __len__(self): return 57 class F(metaclass=F2): def __len__(self): return 4 len(F) #@ len(F()) #@ """) assert next(cls_node.infer()).as_string() == '57' assert next(inst_node.infer()).as_string() == '4' def test_len_object_failure(self): """If taking the length of a class, do not use an instance method""" node = astroid.extract_node(""" class F: def __len__(self): return 57 len(F) """) with pytest.raises(astroid.InferenceError): next(node.infer()) def test_len_string(self): node = astroid.extract_node(""" len("uwu") """) assert next(node.infer()).as_string() == "3" def test_len_generator_failure(self): node = astroid.extract_node(""" def gen(): yield 'a' yield 'b' len(gen()) """) with pytest.raises(astroid.InferenceError): next(node.infer()) def test_len_failure_missing_variable(self): node = astroid.extract_node(""" len(a) """) with pytest.raises(astroid.InferenceError): next(node.infer()) def test_len_bytes(self): node = astroid.extract_node(""" len(b'uwu') """) assert next(node.infer()).as_string() == '3' @pytest.mark.xfail(reason="Can't retrieve subclassed type value ") def test_int_subclass_result(self): """I am unable to figure out the value of an object which subclasses int""" node = astroid.extract_node(""" class IntSubclass(int): pass class F: def __len__(self): return IntSubclass(5) len(F()) """) assert next(node.infer()).as_string() == '5' @pytest.mark.xfail(reason="Can't use list special astroid fields") def test_int_subclass_argument(self): """I am unable to access the length of a object which subclasses list""" node = astroid.extract_node(""" class ListSubclass(list): pass len(ListSubclass([1,2,3,4,4])) """) assert next(node.infer()).as_string() == '5' def test_len_builtin_inference_attribute_error_str(self): """Make sure len builtin doesn't raise an AttributeError on instances of str or bytes See https://github.com/PyCQA/pylint/issues/1942 """ code = 'len(str("F"))' try: next(astroid.extract_node(code).infer()) except astroid.InferenceError: pass def test_infer_str(): ast_nodes = astroid.extract_node(''' str(s) #@ str('a') #@ str(some_object()) #@ ''') for node in ast_nodes: inferred = next(node.infer()) assert isinstance(inferred, astroid.Const) node = astroid.extract_node(''' str(s='') #@ ''') inferred = next(node.infer()) assert isinstance(inferred, astroid.Instance) assert inferred.qname() == 'builtins.str' def test_infer_int(): ast_nodes = astroid.extract_node(''' int(0) #@ int('1') #@ ''') for node in ast_nodes: inferred = next(node.infer()) assert isinstance(inferred, astroid.Const) ast_nodes = astroid.extract_node(''' int(s='') #@ int('2.5') #@ int('something else') #@ int(unknown) #@ int(b'a') #@ ''') for node in ast_nodes: inferred = next(node.infer()) assert 
isinstance(inferred, astroid.Instance) assert inferred.qname() == 'builtins.int' def test_infer_dict_from_keys(): bad_nodes = astroid.extract_node(''' dict.fromkeys() #@ dict.fromkeys(1, 2, 3) #@ dict.fromkeys(a=1) #@ ''') for node in bad_nodes: with pytest.raises(astroid.InferenceError): next(node.infer()) # Test uninferable values good_nodes = astroid.extract_node(''' from unknown import Unknown dict.fromkeys(some_value) #@ dict.fromkeys(some_other_value) #@ dict.fromkeys([Unknown(), Unknown()]) #@ dict.fromkeys([Unknown(), Unknown()]) #@ ''') for node in good_nodes: inferred = next(node.infer()) assert isinstance(inferred, astroid.Dict) assert inferred.items == [] # Test inferrable values # from a dictionary's keys from_dict = astroid.extract_node(''' dict.fromkeys({'a':2, 'b': 3, 'c': 3}) #@ ''') inferred = next(from_dict.infer()) assert isinstance(inferred, astroid.Dict) itered = inferred.itered() assert all(isinstance(elem, astroid.Const) for elem in itered) actual_values = [elem.value for elem in itered] assert sorted(actual_values) == ['a', 'b', 'c'] # from a string from_string = astroid.extract_node(''' dict.fromkeys('abc') ''') inferred = next(from_string.infer()) assert isinstance(inferred, astroid.Dict) itered = inferred.itered() assert all(isinstance(elem, astroid.Const) for elem in itered) actual_values = [elem.value for elem in itered] assert sorted(actual_values) == ['a', 'b', 'c'] # from bytes from_bytes = astroid.extract_node(''' dict.fromkeys(b'abc') ''') inferred = next(from_bytes.infer()) assert isinstance(inferred, astroid.Dict) itered = inferred.itered() assert all(isinstance(elem, astroid.Const) for elem in itered) actual_values = [elem.value for elem in itered] assert sorted(actual_values) == [97, 98, 99] # From list/set/tuple from_others = astroid.extract_node(''' dict.fromkeys(('a', 'b', 'c')) #@ dict.fromkeys(['a', 'b', 'c']) #@ dict.fromkeys({'a', 'b', 'c'}) #@ ''') for node in from_others: inferred = next(node.infer()) assert isinstance(inferred, astroid.Dict) itered = inferred.itered() assert all(isinstance(elem, astroid.Const) for elem in itered) actual_values = [elem.value for elem in itered] assert sorted(actual_values) == ['a', 'b', 'c'] class TestFunctoolsPartial: def test_invalid_functools_partial_calls(self): ast_nodes = astroid.extract_node(''' from functools import partial from unknown import Unknown def test(a, b, c): return a + b + c partial() #@ partial(test) #@ partial(func=test) #@ partial(some_func, a=1) #@ partial(Unknown, a=1) #@ partial(2, a=1) #@ partial(test, unknown=1) #@ ''') for node in ast_nodes: inferred = next(node.infer()) assert isinstance(inferred, (astroid.FunctionDef, astroid.Instance)) assert inferred.qname() in ('functools.partial', 'functools.partial.newfunc') def test_inferred_partial_function_calls(self): ast_nodes = astroid.extract_node(''' from functools import partial def test(a, b): return a + b partial(test, 1)(3) #@ partial(test, b=4)(3) #@ partial(test, b=4)(a=3) #@ def other_test(a, b, *, c=1): return (a + b) * c partial(other_test, 1, 2)() #@ partial(other_test, 1, 2)(c=4) #@ partial(other_test, c=4)(1, 3) #@ partial(other_test, 4, c=4)(4) #@ partial(other_test, 4, c=4)(b=5) #@ ''') expected_values = [ 4, 7, 7, 3, 12, 16, 32, 36, ] for node, expected_value in zip(ast_nodes, expected_values): inferred = next(node.infer()) assert isinstance(inferred, astroid.Const) assert inferred.value == expected_value if __name__ == '__main__': unittest.main() 
astroid-2.0.1/astroid/tests/unittest_brain_numpy.py0000644000076500000240000003451413324063433023403 0ustar claudiustaff00000000000000#-*- encoding=utf-8 -*- # Copyright (c) 2017-2018 hippo91 # Copyright (c) 2017 Claudiu Popa # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import unittest import contextlib try: import numpy # pylint: disable=unused-import HAS_NUMPY = True except ImportError: HAS_NUMPY = False from astroid import builder from astroid import nodes class SubTestWrapper(unittest.TestCase): """ A class for supporting all unittest version wether or not subTest is available """ def subTest(self, msg=None, **params): try: # For python versions above 3.5 this should be ok return super(SubTestWrapper, self).subTest(msg, **params) except AttributeError: # For python versions below 3.5 return subTestMock(msg) @contextlib.contextmanager def subTestMock(msg=None): """ A mock for subTest which do nothing """ yield msg @unittest.skipUnless(HAS_NUMPY, "This test requires the numpy library.") class NumpyBrainCoreUmathTest(SubTestWrapper): """ Test of all members of numpy.core.umath module """ no_arg_ufunc = ('geterrobj',) one_arg_ufunc_spec = ('seterrobj',) one_arg_ufunc = ( 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh', 'cbrt', 'conj', 'conjugate', 'cosh', 'deg2rad', 'degrees', 'exp2', 'expm1', 'fabs', 'frexp', 'isfinite', 'isinf', 'log', 'log1p', 'log2', 'logical_not', 'modf', 'negative', 'rad2deg', 'radians', 'reciprocal', 'rint', 'sign', 'signbit', 'spacing', 'square', 'tan', 'tanh', 'trunc',) two_args_ufunc = ( 'bitwise_and', 'bitwise_or', 'bitwise_xor', 'copysign', 'divide', 'equal', 'float_power', 'floor_divide', 'fmax', 'fmin', 'fmod', 'greater', 'hypot', 'ldexp', 'left_shift', 'less', 'logaddexp', 'logaddexp2', 'logical_and', 'logical_or', 'logical_xor', 'maximum', 'minimum', 'nextafter', 'not_equal', 'power', 'remainder', 'right_shift', 'subtract', 'true_divide',) all_ufunc = no_arg_ufunc + one_arg_ufunc_spec + one_arg_ufunc + two_args_ufunc constants = ('e', 'euler_gamma') def _inferred_numpy_attribute(self, func_name): node = builder.extract_node(""" import numpy.core.umath as tested_module func = tested_module.{:s} func""".format(func_name)) return next(node.infer()) def test_numpy_core_umath_constants(self): """ Test that constants have Const type. """ for const in self.constants: with self.subTest(const=const): inferred = self._inferred_numpy_attribute(const) self.assertIsInstance(inferred, nodes.Const) def test_numpy_core_umath_constants_values(self): """ Test the values of the constants. """ exact_values = {'e': 2.718281828459045, 'euler_gamma': 0.5772156649015329} for const in self.constants: with self.subTest(const=const): inferred = self._inferred_numpy_attribute(const) self.assertEqual(inferred.value, exact_values[const]) def test_numpy_core_umath_functions(self): """ Test that functions have FunctionDef type. """ for func in self.all_ufunc: with self.subTest(func=func): inferred = self._inferred_numpy_attribute(func) self.assertIsInstance(inferred, nodes.FunctionDef) def test_numpy_core_umath_functions_no_arg(self): """ Test that functions with no arguments have really no arguments. 
""" for func in self.no_arg_ufunc: with self.subTest(func=func): inferred = self._inferred_numpy_attribute(func) self.assertFalse(inferred.argnames()) def test_numpy_core_umath_functions_one_arg_spec(self): """ Test the arguments names of functions. """ exact_arg_names = ['errobj'] for func in self.one_arg_ufunc_spec: with self.subTest(func=func): inferred = self._inferred_numpy_attribute(func) self.assertEqual(inferred.argnames(), exact_arg_names) def test_numpy_core_umath_functions_one_arg(self): """ Test the arguments names of functions. """ exact_arg_names = ['x', 'out', 'where', 'casting', 'order', 'dtype', 'subok'] for func in self.one_arg_ufunc: with self.subTest(func=func): inferred = self._inferred_numpy_attribute(func) self.assertEqual(inferred.argnames(), exact_arg_names) def test_numpy_core_umath_functions_two_args(self): """ Test the arguments names of functions. """ exact_arg_names = ['x1', 'x2', 'out', 'where', 'casting', 'order', 'dtype', 'subok'] for func in self.two_args_ufunc: with self.subTest(func=func): inferred = self._inferred_numpy_attribute(func) self.assertEqual(inferred.argnames(), exact_arg_names) def test_numpy_core_umath_functions_kwargs_default_values(self): """ Test the default values for keyword arguments. """ exact_kwargs_default_values = [None, True, 'same_kind', 'K', None, True] for func in self.one_arg_ufunc + self.two_args_ufunc: with self.subTest(func=func): inferred = self._inferred_numpy_attribute(func) default_args_values = [default.value for default in inferred.args.defaults] self.assertEqual(default_args_values, exact_kwargs_default_values) @unittest.skipUnless(HAS_NUMPY, "This test requires the numpy library.") class NumpyBrainRandomMtrandTest(SubTestWrapper): """ Test of all the functions of numpy.random.mtrand module. 
""" # Map between functions names and arguments names and default values all_mtrand = { 'beta': (['a', 'b', 'size'], [None]), 'binomial': (['n', 'p', 'size'], [None]), 'bytes': (['length',], []), 'chisquare': (['df', 'size'], [None]), 'choice': (['a', 'size', 'replace', 'p'], [None, True, None]), 'dirichlet': (['alpha', 'size'], [None]), 'exponential': (['scale', 'size'], [1.0, None]), 'f': (['dfnum', 'dfden', 'size'], [None]), 'gamma': (['shape', 'scale', 'size'], [1.0, None]), 'geometric': (['p', 'size'], [None]), 'get_state': ([], []), 'gumbel': (['loc', 'scale', 'size'], [0.0, 1.0, None]), 'hypergeometric': (['ngood', 'nbad', 'nsample', 'size'], [None]), 'laplace': (['loc', 'scale', 'size'], [0.0, 1.0, None]), 'logistic': (['loc', 'scale', 'size'], [0.0, 1.0, None]), 'lognormal': (['mean', 'sigma', 'size'], [0.0, 1.0, None]), 'logseries': (['p', 'size'], [None]), 'multinomial': (['n', 'pvals', 'size'], [None]), 'multivariate_normal': (['mean', 'cov', 'size'], [None]), 'negative_binomial': (['n', 'p', 'size'], [None]), 'noncentral_chisquare': (['df', 'nonc', 'size'], [None]), 'noncentral_f': (['dfnum', 'dfden', 'nonc', 'size'], [None]), 'normal': (['loc', 'scale', 'size'], [0.0, 1.0, None]), 'pareto': (['a', 'size'], [None]), 'permutation': (['x'], []), 'poisson': (['lam', 'size'], [1.0, None]), 'power': (['a', 'size'], [None]), 'rand': (['args'], []), 'randint': (['low', 'high', 'size', 'dtype'], [None, None, 'l']), 'randn': (['args'], []), 'random_integers': (['low', 'high', 'size'], [None, None]), 'random_sample': (['size'], [None]), 'rayleigh': (['scale', 'size'], [1.0, None]), 'seed': (['seed'], [None]), 'set_state': (['state'], []), 'shuffle': (['x'], []), 'standard_cauchy': (['size'], [None]), 'standard_exponential': (['size'], [None]), 'standard_gamma': (['shape', 'size'], [None]), 'standard_normal': (['size'], [None]), 'standard_t': (['df', 'size'], [None]), 'triangular': (['left', 'mode', 'right', 'size'], [None]), 'uniform': (['low', 'high', 'size'], [0.0, 1.0, None]), 'vonmises': (['mu', 'kappa', 'size'], [None]), 'wald': (['mean', 'scale', 'size'], [None]), 'weibull': (['a', 'size'], [None]), 'zipf': (['a', 'size'], [None])} def _inferred_numpy_attribute(self, func_name): node = builder.extract_node(""" import numpy.random.mtrand as tested_module func = tested_module.{:s} func""".format(func_name)) return next(node.infer()) def test_numpy_random_mtrand_functions(self): """ Test that all functions have FunctionDef type. """ for func in self.all_mtrand: with self.subTest(func=func): inferred = self._inferred_numpy_attribute(func) self.assertIsInstance(inferred, nodes.FunctionDef) def test_numpy_random_mtrand_functions_signature(self): """ Test the arguments names and default values. """ for func, (exact_arg_names, exact_kwargs_default_values) in self.all_mtrand.items(): with self.subTest(func=func): inferred = self._inferred_numpy_attribute(func) self.assertEqual(inferred.argnames(), exact_arg_names) default_args_values = [default.value for default in inferred.args.defaults] self.assertEqual(default_args_values, exact_kwargs_default_values) @unittest.skipUnless(HAS_NUMPY, "This test requires the numpy library.") class NumpyBrainCoreNumericTypesTest(SubTestWrapper): """ Test of all the missing types defined in numerictypes module. 
""" all_types = ['uint16', 'uint32', 'uint64', 'float16', 'float32', 'float64', 'float96', 'complex64', 'complex128', 'complex192', 'timedelta64', 'datetime64', 'unicode_', 'str_', 'bool_', 'bool8', 'byte', 'int8', 'bytes0', 'bytes_', 'cdouble', 'cfloat', 'character', 'clongdouble', 'clongfloat', 'complexfloating', 'csingle', 'double', 'flexible', 'floating', 'half', 'inexact', 'int0', 'longcomplex', 'longdouble', 'longfloat', 'short', 'signedinteger', 'single', 'singlecomplex', 'str0', 'ubyte', 'uint', 'uint0', 'uintc', 'uintp', 'ulonglong', 'unsignedinteger', 'ushort', 'void0'] def _inferred_numpy_attribute(self, attrib): node = builder.extract_node(""" import numpy.core.numerictypes as tested_module missing_type = tested_module.{:s}""".format(attrib)) return next(node.value.infer()) def test_numpy_core_types(self): """ Test that all defined types have ClassDef type. """ for typ in self.all_types: with self.subTest(typ=typ): inferred = self._inferred_numpy_attribute(typ) self.assertIsInstance(inferred, nodes.ClassDef) def test_generic_types_have_methods(self): """ Test that all generic derived types have specified methods """ generic_methods = ['all', 'any', 'argmax', 'argmin', 'argsort', 'astype', 'base', 'byteswap', 'choose', 'clip', 'compress', 'conj', 'conjugate', 'copy', 'cumprod', 'cumsum', 'data', 'diagonal', 'dtype', 'dump', 'dumps', 'fill', 'flags', 'flat', 'flatten', 'getfield', 'imag', 'item', 'itemset', 'itemsize', 'max', 'mean', 'min', 'nbytes', 'ndim', 'newbyteorder', 'nonzero', 'prod', 'ptp', 'put', 'ravel', 'real', 'repeat', 'reshape', 'resize', 'round', 'searchsorted', 'setfield', 'setflags', 'shape', 'size', 'sort', 'squeeze', 'std', 'strides', 'sum', 'swapaxes', 'take', 'tobytes', 'tofile', 'tolist', 'tostring', 'trace', 'transpose', 'var', 'view'] for type_ in ('bool_', 'bytes_', 'character', 'complex128', 'complex192', 'complex64', 'complexfloating', 'datetime64', 'flexible', 'float16', 'float32', 'float64', 'float96', 'floating', 'generic', 'inexact', 'int16', 'int32', 'int32', 'int64', 'int8', 'integer', 'number', 'signedinteger', 'str_', 'timedelta64', 'uint16', 'uint32', 'uint32', 'uint64', 'uint8', 'unsignedinteger', 'void'): with self.subTest(typ=type_): inferred = self._inferred_numpy_attribute(type_) for meth in generic_methods: with self.subTest(meth=meth): self.assertTrue(meth in {m.name for m in inferred.methods()}) def test_generic_types_have_attributes(self): """ Test that all generic derived types have specified attributes """ generic_attr = ['base', 'data', 'dtype', 'flags', 'flat', 'imag', 'itemsize', 'nbytes', 'ndim', 'real', 'size', 'strides'] for type_ in ('bool_', 'bytes_', 'character', 'complex128', 'complex192', 'complex64', 'complexfloating', 'datetime64', 'flexible', 'float16', 'float32', 'float64', 'float96', 'floating', 'generic', 'inexact', 'int16', 'int32', 'int32', 'int64', 'int8', 'integer', 'number', 'signedinteger', 'str_', 'timedelta64', 'uint16', 'uint32', 'uint32', 'uint64', 'uint8', 'unsignedinteger', 'void'): with self.subTest(typ=type_): inferred = self._inferred_numpy_attribute(type_) for attr in generic_attr: with self.subTest(attr=attr): self.assertNotEqual(len(inferred.getattr(attr)), 0) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_builder.py0000644000076500000240000006440013324063433022503 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014-2015 Google, Inc. 
# Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2017 Bryce Guinta # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2018 brendanator # Copyright (c) 2018 Anthony Sottile # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """tests for the astroid builder and rebuilder module""" import builtins import collections import os import sys import unittest from astroid import builder from astroid import exceptions from astroid import manager from astroid import nodes from astroid import test_utils from astroid import util from astroid.tests import resources MANAGER = manager.AstroidManager() BUILTINS = builtins.__name__ class FromToLineNoTest(unittest.TestCase): def setUp(self): self.astroid = resources.build_file('data/format.py') def test_callfunc_lineno(self): stmts = self.astroid.body # on line 4: # function('aeozrijz\ # earzer', hop) discard = stmts[0] self.assertIsInstance(discard, nodes.Expr) self.assertEqual(discard.fromlineno, 4) self.assertEqual(discard.tolineno, 5) callfunc = discard.value self.assertIsInstance(callfunc, nodes.Call) self.assertEqual(callfunc.fromlineno, 4) self.assertEqual(callfunc.tolineno, 5) name = callfunc.func self.assertIsInstance(name, nodes.Name) self.assertEqual(name.fromlineno, 4) self.assertEqual(name.tolineno, 4) strarg = callfunc.args[0] self.assertIsInstance(strarg, nodes.Const) if hasattr(sys, 'pypy_version_info'): lineno = 4 else: lineno = 5 # no way for this one in CPython (is 4 actually) self.assertEqual(strarg.fromlineno, lineno) self.assertEqual(strarg.tolineno, lineno) namearg = callfunc.args[1] self.assertIsInstance(namearg, nodes.Name) self.assertEqual(namearg.fromlineno, 5) self.assertEqual(namearg.tolineno, 5) # on line 10: # fonction(1, # 2, # 3, # 4) discard = stmts[2] self.assertIsInstance(discard, nodes.Expr) self.assertEqual(discard.fromlineno, 10) self.assertEqual(discard.tolineno, 13) callfunc = discard.value self.assertIsInstance(callfunc, nodes.Call) self.assertEqual(callfunc.fromlineno, 10) self.assertEqual(callfunc.tolineno, 13) name = callfunc.func self.assertIsInstance(name, nodes.Name) self.assertEqual(name.fromlineno, 10) self.assertEqual(name.tolineno, 10) for i, arg in enumerate(callfunc.args): self.assertIsInstance(arg, nodes.Const) self.assertEqual(arg.fromlineno, 10+i) self.assertEqual(arg.tolineno, 10+i) def test_function_lineno(self): stmts = self.astroid.body # on line 15: # def definition(a, # b, # c): # return a + b + c function = stmts[3] self.assertIsInstance(function, nodes.FunctionDef) self.assertEqual(function.fromlineno, 15) self.assertEqual(function.tolineno, 18) return_ = function.body[0] self.assertIsInstance(return_, nodes.Return) self.assertEqual(return_.fromlineno, 18) self.assertEqual(return_.tolineno, 18) if sys.version_info < (3, 0): self.assertEqual(function.blockstart_tolineno, 17) else: self.skipTest('FIXME http://bugs.python.org/issue10445 ' '(no line number on function args)') def test_decorated_function_lineno(self): astroid = builder.parse(''' @decorator def function( arg): print (arg) ''', __name__) function = astroid['function'] # XXX discussable, but that's what is expected by pylint right now self.assertEqual(function.fromlineno, 3) self.assertEqual(function.tolineno, 5) self.assertEqual(function.decorators.fromlineno, 2) self.assertEqual(function.decorators.tolineno, 2) if sys.version_info < (3, 0): 
self.assertEqual(function.blockstart_tolineno, 4) else: self.skipTest('FIXME http://bugs.python.org/issue10445 ' '(no line number on function args)') def test_class_lineno(self): stmts = self.astroid.body # on line 20: # class debile(dict, # object): # pass class_ = stmts[4] self.assertIsInstance(class_, nodes.ClassDef) self.assertEqual(class_.fromlineno, 20) self.assertEqual(class_.tolineno, 22) self.assertEqual(class_.blockstart_tolineno, 21) pass_ = class_.body[0] self.assertIsInstance(pass_, nodes.Pass) self.assertEqual(pass_.fromlineno, 22) self.assertEqual(pass_.tolineno, 22) def test_if_lineno(self): stmts = self.astroid.body # on line 20: # if aaaa: pass # else: # aaaa,bbbb = 1,2 # aaaa,bbbb = bbbb,aaaa if_ = stmts[5] self.assertIsInstance(if_, nodes.If) self.assertEqual(if_.fromlineno, 24) self.assertEqual(if_.tolineno, 27) self.assertEqual(if_.blockstart_tolineno, 24) self.assertEqual(if_.orelse[0].fromlineno, 26) self.assertEqual(if_.orelse[1].tolineno, 27) def test_for_while_lineno(self): for code in (''' for a in range(4): print (a) break else: print ("bouh") ''', ''' while a: print (a) break else: print ("bouh") '''): astroid = builder.parse(code, __name__) stmt = astroid.body[0] self.assertEqual(stmt.fromlineno, 2) self.assertEqual(stmt.tolineno, 6) self.assertEqual(stmt.blockstart_tolineno, 2) self.assertEqual(stmt.orelse[0].fromlineno, 6) # XXX self.assertEqual(stmt.orelse[0].tolineno, 6) def test_try_except_lineno(self): astroid = builder.parse(''' try: print (a) except: pass else: print ("bouh") ''', __name__) try_ = astroid.body[0] self.assertEqual(try_.fromlineno, 2) self.assertEqual(try_.tolineno, 7) self.assertEqual(try_.blockstart_tolineno, 2) self.assertEqual(try_.orelse[0].fromlineno, 7) # XXX self.assertEqual(try_.orelse[0].tolineno, 7) hdlr = try_.handlers[0] self.assertEqual(hdlr.fromlineno, 4) self.assertEqual(hdlr.tolineno, 5) self.assertEqual(hdlr.blockstart_tolineno, 4) def test_try_finally_lineno(self): astroid = builder.parse(''' try: print (a) finally: print ("bouh") ''', __name__) try_ = astroid.body[0] self.assertEqual(try_.fromlineno, 2) self.assertEqual(try_.tolineno, 5) self.assertEqual(try_.blockstart_tolineno, 2) self.assertEqual(try_.finalbody[0].fromlineno, 5) # XXX self.assertEqual(try_.finalbody[0].tolineno, 5) def test_try_finally_25_lineno(self): astroid = builder.parse(''' try: print (a) except: pass finally: print ("bouh") ''', __name__) try_ = astroid.body[0] self.assertEqual(try_.fromlineno, 2) self.assertEqual(try_.tolineno, 7) self.assertEqual(try_.blockstart_tolineno, 2) self.assertEqual(try_.finalbody[0].fromlineno, 7) # XXX self.assertEqual(try_.finalbody[0].tolineno, 7) def test_with_lineno(self): astroid = builder.parse(''' from __future__ import with_statement with file("/tmp/pouet") as f: print (f) ''', __name__) with_ = astroid.body[1] self.assertEqual(with_.fromlineno, 3) self.assertEqual(with_.tolineno, 4) self.assertEqual(with_.blockstart_tolineno, 3) class BuilderTest(unittest.TestCase): def setUp(self): self.builder = builder.AstroidBuilder() def test_data_build_null_bytes(self): with self.assertRaises(exceptions.AstroidSyntaxError): self.builder.string_build('\x00') def test_data_build_invalid_x_escape(self): with self.assertRaises(exceptions.AstroidSyntaxError): self.builder.string_build('"\\x1"') def test_missing_newline(self): """check that a file with no trailing new line is parseable""" resources.build_file('data/noendingnewline.py') def test_missing_file(self): with 
self.assertRaises(exceptions.AstroidBuildingError): resources.build_file('data/inexistant.py') def test_inspect_build0(self): """test astroid tree build from a living object""" builtin_ast = MANAGER.ast_from_module_name(BUILTINS) # just check type and object are there builtin_ast.getattr('type') objectastroid = builtin_ast.getattr('object')[0] self.assertIsInstance(objectastroid.getattr('__new__')[0], nodes.FunctionDef) # check open file alias builtin_ast.getattr('open') # check 'help' is there (defined dynamically by site.py) builtin_ast.getattr('help') # check property has __init__ pclass = builtin_ast['property'] self.assertIn('__init__', pclass) self.assertIsInstance(builtin_ast['None'], nodes.Const) self.assertIsInstance(builtin_ast['True'], nodes.Const) self.assertIsInstance(builtin_ast['False'], nodes.Const) self.assertIsInstance(builtin_ast['Exception'], nodes.ClassDef) self.assertIsInstance(builtin_ast['NotImplementedError'], nodes.ClassDef) def test_inspect_build1(self): time_ast = MANAGER.ast_from_module_name('time') self.assertTrue(time_ast) self.assertEqual(time_ast['time'].args.defaults, []) if os.name == 'java': test_inspect_build1 = unittest.expectedFailure(test_inspect_build1) def test_inspect_build2(self): """test astroid tree build from a living object""" try: from mx import DateTime except ImportError: self.skipTest('test skipped: mxDateTime is not available') else: dt_ast = self.builder.inspect_build(DateTime) dt_ast.getattr('DateTime') # this one is failing since DateTimeType.__module__ = 'builtins' ! #dt_ast.getattr('DateTimeType') def test_inspect_build3(self): self.builder.inspect_build(unittest) @test_utils.require_version(maxver='3.0') def test_inspect_build_instance(self): """test astroid tree build from a living object""" import exceptions as builtin_exceptions builtin_ast = self.builder.inspect_build(builtin_exceptions) fclass = builtin_ast['OSError'] # things like OSError.strerror are now (2.5) data descriptors on the # class instead of entries in the __dict__ of an instance container = fclass self.assertIn('errno', container) self.assertIn('strerror', container) self.assertIn('filename', container) def test_inspect_build_type_object(self): builtin_ast = MANAGER.ast_from_module_name(BUILTINS) inferred = list(builtin_ast.igetattr('object')) self.assertEqual(len(inferred), 1) inferred = inferred[0] self.assertEqual(inferred.name, 'object') inferred.as_string() # no crash test inferred = list(builtin_ast.igetattr('type')) self.assertEqual(len(inferred), 1) inferred = inferred[0] self.assertEqual(inferred.name, 'type') inferred.as_string() # no crash test def test_inspect_transform_module(self): # ensure no cached version of the time module MANAGER._mod_file_cache.pop(('time', None), None) MANAGER.astroid_cache.pop('time', None) def transform_time(node): if node.name == 'time': node.transformed = True MANAGER.register_transform(nodes.Module, transform_time) try: time_ast = MANAGER.ast_from_module_name('time') self.assertTrue(getattr(time_ast, 'transformed', False)) finally: MANAGER.unregister_transform(nodes.Module, transform_time) def test_package_name(self): """test base properties and method of a astroid module""" datap = resources.build_file('data/__init__.py', 'data') self.assertEqual(datap.name, 'data') self.assertEqual(datap.package, 1) datap = resources.build_file('data/__init__.py', 'data.__init__') self.assertEqual(datap.name, 'data') self.assertEqual(datap.package, 1) datap = resources.build_file('data/tmp__init__.py', 'data.tmp__init__') 
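# (test_package_name continues below with the assertions on ``datap``.)
#
# A condensed sketch of the transform workflow exercised by
# test_inspect_transform_module above: register a Module transform with a
# predicate, force a fresh build, and always unregister the same
# (transform, predicate) pair afterwards.  The helper name is illustrative;
# 'time' is just a convenient, always-importable target.
def _sketch_module_transform():
    from astroid import manager, nodes

    mgr = manager.AstroidManager()      # borg: shares state with MANAGER

    def is_time(node):
        return node.name == 'time'

    def tag_module(node):
        node.transformed = True

    mgr.register_transform(nodes.Module, tag_module, is_time)
    try:
        # drop any previously cached tree so the transform is applied
        mgr._mod_file_cache.pop(('time', None), None)
        mgr.astroid_cache.pop('time', None)
        module = mgr.ast_from_module_name('time')
        return getattr(module, 'transformed', False)
    finally:
        mgr.unregister_transform(nodes.Module, tag_module, is_time)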
self.assertEqual(datap.name, 'data.tmp__init__') self.assertEqual(datap.package, 0) def test_yield_parent(self): """check if we added discard nodes as yield parent (w/ compiler)""" code = """ def yiell(): #@ yield 0 if noe: yield more """ func = builder.extract_node(code) self.assertIsInstance(func, nodes.FunctionDef) stmt = func.body[0] self.assertIsInstance(stmt, nodes.Expr) self.assertIsInstance(stmt.value, nodes.Yield) self.assertIsInstance(func.body[1].body[0], nodes.Expr) self.assertIsInstance(func.body[1].body[0].value, nodes.Yield) def test_object(self): obj_ast = self.builder.inspect_build(object) self.assertIn('__setattr__', obj_ast) def test_newstyle_detection(self): data = ''' class A: "old style" class B(A): "old style" class C(object): "new style" class D(C): "new style" __metaclass__ = type class E(A): "old style" class F: "new style" ''' mod_ast = builder.parse(data, __name__) self.assertTrue(mod_ast['A'].newstyle) self.assertTrue(mod_ast['B'].newstyle) self.assertTrue(mod_ast['E'].newstyle) self.assertTrue(mod_ast['C'].newstyle) self.assertTrue(mod_ast['D'].newstyle) self.assertTrue(mod_ast['F'].newstyle) def test_globals(self): data = ''' CSTE = 1 def update_global(): global CSTE CSTE += 1 def global_no_effect(): global CSTE2 print (CSTE) ''' astroid = builder.parse(data, __name__) self.assertEqual(len(astroid.getattr('CSTE')), 2) self.assertIsInstance(astroid.getattr('CSTE')[0], nodes.AssignName) self.assertEqual(astroid.getattr('CSTE')[0].fromlineno, 2) self.assertEqual(astroid.getattr('CSTE')[1].fromlineno, 6) with self.assertRaises(exceptions.AttributeInferenceError): astroid.getattr('CSTE2') with self.assertRaises(exceptions.InferenceError): next(astroid['global_no_effect'].ilookup('CSTE2')) def test_socket_build(self): import socket astroid = self.builder.module_build(socket) # XXX just check the first one. Actually 3 objects are inferred (look at # the socket module) but the last one as those attributes dynamically # set and astroid is missing this. 
for fclass in astroid.igetattr('socket'): self.assertIn('connect', fclass) self.assertIn('send', fclass) self.assertIn('close', fclass) break def test_gen_expr_var_scope(self): data = 'l = list(n for n in range(10))\n' astroid = builder.parse(data, __name__) # n unavailable outside gen expr scope self.assertNotIn('n', astroid) # test n is inferable anyway n = test_utils.get_name_node(astroid, 'n') self.assertIsNot(n.scope(), astroid) self.assertEqual([i.__class__ for i in n.infer()], [util.Uninferable.__class__]) def test_no_future_imports(self): mod = builder.parse("import sys") self.assertEqual(set(), mod.future_imports) def test_future_imports(self): mod = builder.parse("from __future__ import print_function") self.assertEqual({'print_function'}, mod.future_imports) def test_two_future_imports(self): mod = builder.parse(""" from __future__ import print_function from __future__ import absolute_import """) self.assertEqual({'print_function', 'absolute_import'}, mod.future_imports) def test_inferred_build(self): code = ''' class A: pass A.type = "class" def A_assign_type(self): print (self) A.assign_type = A_assign_type ''' astroid = builder.parse(code) lclass = list(astroid.igetattr('A')) self.assertEqual(len(lclass), 1) lclass = lclass[0] self.assertIn('assign_type', lclass.locals) self.assertIn('type', lclass.locals) def test_augassign_attr(self): builder.parse(""" class Counter: v = 0 def inc(self): self.v += 1 """, __name__) # TODO: Check self.v += 1 generate AugAssign(AssAttr(...)), # not AugAssign(GetAttr(AssName...)) def test_inferred_dont_pollute(self): code = ''' def func(a=None): a.custom_attr = 0 def func2(a={}): a.custom_attr = 0 ''' builder.parse(code) nonetype = nodes.const_factory(None) # pylint: disable=no-member; Infers two potential values self.assertNotIn('custom_attr', nonetype.locals) self.assertNotIn('custom_attr', nonetype.instance_attrs) nonetype = nodes.const_factory({}) self.assertNotIn('custom_attr', nonetype.locals) self.assertNotIn('custom_attr', nonetype.instance_attrs) def test_asstuple(self): code = 'a, b = range(2)' astroid = builder.parse(code) self.assertIn('b', astroid.locals) code = ''' def visit_if(self, node): node.test, body = node.tests[0] ''' astroid = builder.parse(code) self.assertIn('body', astroid['visit_if'].locals) def test_build_constants(self): '''test expected values of constants after rebuilding''' code = ''' def func(): return None return return 'None' ''' astroid = builder.parse(code) none, nothing, chain = [ret.value for ret in astroid.body[0].body] self.assertIsInstance(none, nodes.Const) self.assertIsNone(none.value) self.assertIsNone(nothing) self.assertIsInstance(chain, nodes.Const) self.assertEqual(chain.value, 'None') def test_not_implemented(self): node = builder.extract_node(''' NotImplemented #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, NotImplemented) class FileBuildTest(unittest.TestCase): def setUp(self): self.module = resources.build_file('data/module.py', 'data.module') def test_module_base_props(self): """test base properties and method of a astroid module""" module = self.module self.assertEqual(module.name, 'data.module') self.assertEqual(module.doc, "test module for astroid\n") self.assertEqual(module.fromlineno, 0) self.assertIsNone(module.parent) self.assertEqual(module.frame(), module) self.assertEqual(module.root(), module) self.assertEqual(module.file, os.path.abspath(resources.find('data/module.py'))) self.assertEqual(module.pure_python, 1) 
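# (test_module_base_props continues below with the package/statement checks.)
#
# A small self-contained illustration of the Module properties asserted in
# this test, using builder.parse() on an inline snippet instead of the
# 'data.module' resource; the module name 'demo' and the helper are arbitrary.
def _sketch_module_properties():
    from astroid import builder

    module = builder.parse('''
    """demo module"""
    CONST = 1

    def func():
        return CONST
    ''', 'demo')
    # name comes from the second argument to parse(), doc from the docstring;
    # a module built from source is its own frame and root.
    assert module.name == 'demo'
    assert module.doc == 'demo module'
    assert module.frame() is module and module.root() is module
    return sorted(module.keys())        # ['CONST', 'func']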
self.assertEqual(module.package, 0) self.assertFalse(module.is_statement) self.assertEqual(module.statement(), module) self.assertEqual(module.statement(), module) def test_module_locals(self): """test the 'locals' dictionary of a astroid module""" module = self.module _locals = module.locals self.assertIs(_locals, module.globals) keys = sorted(_locals.keys()) should = ['MY_DICT', 'NameNode', 'YO', 'YOUPI', '__revision__', 'global_access', 'modutils', 'four_args', 'os', 'redirect'] should.sort() self.assertEqual(keys, sorted(should)) def test_function_base_props(self): """test base properties and method of a astroid function""" module = self.module function = module['global_access'] self.assertEqual(function.name, 'global_access') self.assertEqual(function.doc, 'function test') self.assertEqual(function.fromlineno, 11) self.assertTrue(function.parent) self.assertEqual(function.frame(), function) self.assertEqual(function.parent.frame(), module) self.assertEqual(function.root(), module) self.assertEqual([n.name for n in function.args.args], ['key', 'val']) self.assertEqual(function.type, 'function') def test_function_locals(self): """test the 'locals' dictionary of a astroid function""" _locals = self.module['global_access'].locals self.assertEqual(len(_locals), 4) keys = sorted(_locals.keys()) self.assertEqual(keys, ['i', 'key', 'local', 'val']) def test_class_base_props(self): """test base properties and method of a astroid class""" module = self.module klass = module['YO'] self.assertEqual(klass.name, 'YO') self.assertEqual(klass.doc, 'hehe\n haha') self.assertEqual(klass.fromlineno, 25) self.assertTrue(klass.parent) self.assertEqual(klass.frame(), klass) self.assertEqual(klass.parent.frame(), module) self.assertEqual(klass.root(), module) self.assertEqual(klass.basenames, []) self.assertTrue(klass.newstyle) def test_class_locals(self): """test the 'locals' dictionary of a astroid class""" module = self.module klass1 = module['YO'] locals1 = klass1.locals keys = sorted(locals1.keys()) assert_keys = ['__init__', '__module__', '__qualname__', 'a'] if sys.version_info < (3, 3): assert_keys.pop(assert_keys.index('__qualname__')) self.assertEqual(keys, assert_keys) klass2 = module['YOUPI'] locals2 = klass2.locals keys = locals2.keys() assert_keys = [ '__init__', '__module__', '__qualname__', 'class_attr', 'class_method', 'method', 'static_method' ] if sys.version_info < (3, 3): assert_keys.pop(assert_keys.index('__qualname__')) self.assertEqual(sorted(keys), assert_keys) def test_class_instance_attrs(self): module = self.module klass1 = module['YO'] klass2 = module['YOUPI'] self.assertEqual(list(klass1.instance_attrs.keys()), ['yo']) self.assertEqual(list(klass2.instance_attrs.keys()), ['member']) def test_class_basenames(self): module = self.module klass1 = module['YO'] klass2 = module['YOUPI'] self.assertEqual(klass1.basenames, []) self.assertEqual(klass2.basenames, ['YO']) def test_method_base_props(self): """test base properties and method of a astroid method""" klass2 = self.module['YOUPI'] # "normal" method method = klass2['method'] self.assertEqual(method.name, 'method') self.assertEqual([n.name for n in method.args.args], ['self']) self.assertEqual(method.doc, 'method\n test') self.assertEqual(method.fromlineno, 48) self.assertEqual(method.type, 'method') # class method method = klass2['class_method'] self.assertEqual([n.name for n in method.args.args], ['cls']) self.assertEqual(method.type, 'classmethod') # static method method = klass2['static_method'] 
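# (test_method_base_props continues below with the staticmethod assertions.)
#
# A compact sketch of how FunctionDef.type distinguishes plain functions,
# methods, classmethods and staticmethods; the class name 'Demo' and the
# helper name are arbitrary.
def _sketch_function_types():
    from astroid import builder

    module = builder.parse('''
    def plain():
        pass

    class Demo:
        def method(self):
            pass
        @classmethod
        def cls_method(cls):
            pass
        @staticmethod
        def static_method():
            pass
    ''')
    demo = module['Demo']
    return (module['plain'].type,       # 'function'
            demo['method'].type,        # 'method'
            demo['cls_method'].type,    # 'classmethod'
            demo['static_method'].type) # 'staticmethod'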
self.assertEqual(method.args.args, []) self.assertEqual(method.type, 'staticmethod') def test_method_locals(self): """test the 'locals' dictionary of a astroid method""" method = self.module['YOUPI']['method'] _locals = method.locals keys = sorted(_locals) if sys.version_info < (3, 0): self.assertEqual(len(_locals), 5) self.assertEqual(keys, ['a', 'autre', 'b', 'local', 'self']) else:# ListComp variables are no more accessible outside self.assertEqual(len(_locals), 4) self.assertEqual(keys, ['__class__', 'autre', 'local', 'self']) def test_unknown_encoding(self): with self.assertRaises(exceptions.AstroidSyntaxError): resources.build_file('data/invalid_encoding.py') class ModuleBuildTest(resources.SysPathSetup, FileBuildTest): def setUp(self): super(ModuleBuildTest, self).setUp() abuilder = builder.AstroidBuilder() try: import data.module except ImportError: # Make pylint happy. self.skipTest('Unable to load data.module') else: self.module = abuilder.module_build(data.module, 'data.module') def test_module_build_dunder_file(): """Test that module_build() can work with modules that have the *__file__* attribute""" module = builder.AstroidBuilder().module_build(collections) assert module.path[0] == collections.__file__ if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_helpers.py0000644000076500000240000002152513324063433022520 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016, 2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import unittest import builtins from astroid import builder from astroid import exceptions from astroid import helpers from astroid import manager from astroid import raw_building from astroid import test_utils from astroid import util class TestHelpers(unittest.TestCase): def setUp(self): builtins_name = builtins.__name__ astroid_manager = manager.AstroidManager() self.builtins = astroid_manager.astroid_cache[builtins_name] self.manager = manager.AstroidManager() def _extract(self, obj_name): return self.builtins.getattr(obj_name)[0] def _build_custom_builtin(self, obj_name): proxy = raw_building.build_class(obj_name) proxy.parent = self.builtins return proxy def assert_classes_equal(self, cls, other): self.assertEqual(cls.name, other.name) self.assertEqual(cls.parent, other.parent) self.assertEqual(cls.qname(), other.qname()) def test_object_type(self): pairs = [ ('1', self._extract('int')), ('[]', self._extract('list')), ('{1, 2, 3}', self._extract('set')), ('{1:2, 4:3}', self._extract('dict')), ('type', self._extract('type')), ('object', self._extract('type')), ('object()', self._extract('object')), ('lambda: None', self._build_custom_builtin('function')), ('len', self._build_custom_builtin('builtin_function_or_method')), ('None', self._build_custom_builtin('NoneType')), ('import sys\nsys#@', self._build_custom_builtin('module')), ] for code, expected in pairs: node = builder.extract_node(code) objtype = helpers.object_type(node) self.assert_classes_equal(objtype, expected) def test_object_type_classes_and_functions(self): ast_nodes = builder.extract_node(''' def generator(): yield class A(object): def test(self): self #@ @classmethod def cls_method(cls): pass @staticmethod def static_method(): pass A #@ A() #@ A.test #@ A().test #@ A.cls_method #@ A().cls_method #@ A.static_method #@ A().static_method #@ generator() #@ ''') from_self = 
helpers.object_type(ast_nodes[0]) cls = next(ast_nodes[1].infer()) self.assert_classes_equal(from_self, cls) cls_type = helpers.object_type(ast_nodes[1]) self.assert_classes_equal(cls_type, self._extract('type')) instance_type = helpers.object_type(ast_nodes[2]) cls = next(ast_nodes[2].infer())._proxied self.assert_classes_equal(instance_type, cls) expected_method_types = [ (ast_nodes[3], 'function'), (ast_nodes[4], 'method'), (ast_nodes[5], 'method'), (ast_nodes[6], 'method'), (ast_nodes[7], 'function'), (ast_nodes[8], 'function'), (ast_nodes[9], 'generator'), ] for node, expected in expected_method_types: node_type = helpers.object_type(node) expected_type = self._build_custom_builtin(expected) self.assert_classes_equal(node_type, expected_type) @test_utils.require_version(minver='3.0') def test_object_type_metaclasses(self): module = builder.parse(''' import abc class Meta(metaclass=abc.ABCMeta): pass meta_instance = Meta() ''') meta_type = helpers.object_type(module['Meta']) self.assert_classes_equal(meta_type, module['Meta'].metaclass()) meta_instance = next(module['meta_instance'].infer()) instance_type = helpers.object_type(meta_instance) self.assert_classes_equal(instance_type, module['Meta']) @test_utils.require_version(minver='3.0') def test_object_type_most_derived(self): node = builder.extract_node(''' class A(type): def __new__(*args, **kwargs): return type.__new__(*args, **kwargs) class B(object): pass class C(object, metaclass=A): pass # The most derived metaclass of D is A rather than type. class D(B , C): #@ pass ''') metaclass = node.metaclass() self.assertEqual(metaclass.name, 'A') obj_type = helpers.object_type(node) self.assertEqual(metaclass, obj_type) def test_inference_errors(self): node = builder.extract_node(''' from unknown import Unknown u = Unknown #@ ''') self.assertEqual(helpers.object_type(node), util.Uninferable) def test_object_type_too_many_types(self): node = builder.extract_node(''' from unknown import Unknown def test(x): if x: return lambda: None else: return 1 test(Unknown) #@ ''') self.assertEqual(helpers.object_type(node), util.Uninferable) def test_is_subtype(self): ast_nodes = builder.extract_node(''' class int_subclass(int): pass class A(object): pass #@ class B(A): pass #@ class C(A): pass #@ int_subclass() #@ ''') cls_a = ast_nodes[0] cls_b = ast_nodes[1] cls_c = ast_nodes[2] int_subclass = ast_nodes[3] int_subclass = helpers.object_type(next(int_subclass.infer())) base_int = self._extract('int') self.assertTrue(helpers.is_subtype(int_subclass, base_int)) self.assertTrue(helpers.is_supertype(base_int, int_subclass)) self.assertTrue(helpers.is_supertype(cls_a, cls_b)) self.assertTrue(helpers.is_supertype(cls_a, cls_c)) self.assertTrue(helpers.is_subtype(cls_b, cls_a)) self.assertTrue(helpers.is_subtype(cls_c, cls_a)) self.assertFalse(helpers.is_subtype(cls_a, cls_b)) self.assertFalse(helpers.is_subtype(cls_a, cls_b)) @test_utils.require_version(maxver='3.0') def test_is_subtype_supertype_old_style_classes(self): cls_a, cls_b = builder.extract_node(''' class A: #@ pass class B(A): #@ pass ''') self.assertFalse(helpers.is_subtype(cls_a, cls_b)) self.assertFalse(helpers.is_subtype(cls_b, cls_a)) self.assertFalse(helpers.is_supertype(cls_a, cls_b)) self.assertFalse(helpers.is_supertype(cls_b, cls_a)) def test_is_subtype_supertype_mro_error(self): cls_e, cls_f = builder.extract_node(''' class A(object): pass class B(A): pass class C(A): pass class D(B, C): pass class E(C, B): pass #@ class F(D, E): pass #@ ''') 
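# (test_is_subtype_supertype_mro_error continues below, using cls_e and cls_f.)
#
# A short reminder of the extract_node() convention used throughout these
# tests: a trailing '#@' comment marks the statements to return, and several
# markers yield a list of nodes in source order.  A marked bare expression
# statement is unwrapped to the expression itself.  Names here are arbitrary.
def _sketch_extract_node_markers():
    from astroid import builder, nodes

    cls_a, call = builder.extract_node('''
    class A(object): #@
        def method(self):
            return 42

    A() #@
    ''')
    assert isinstance(cls_a, nodes.ClassDef)
    assert isinstance(call, nodes.Call)   # the Expr wrapper is stripped
    return cls_a, call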
self.assertFalse(helpers.is_subtype(cls_e, cls_f)) self.assertFalse(helpers.is_subtype(cls_e, cls_f)) with self.assertRaises(exceptions._NonDeducibleTypeHierarchy): helpers.is_subtype(cls_f, cls_e) self.assertFalse(helpers.is_supertype(cls_f, cls_e)) def test_is_subtype_supertype_unknown_bases(self): cls_a, cls_b = builder.extract_node(''' from unknown import Unknown class A(Unknown): pass #@ class B(A): pass #@ ''') with self.assertRaises(exceptions._NonDeducibleTypeHierarchy): helpers.is_subtype(cls_a, cls_b) with self.assertRaises(exceptions._NonDeducibleTypeHierarchy): helpers.is_supertype(cls_a, cls_b) def test_is_subtype_supertype_unrelated_classes(self): cls_a, cls_b = builder.extract_node(''' class A(object): pass #@ class B(object): pass #@ ''') self.assertFalse(helpers.is_subtype(cls_a, cls_b)) self.assertFalse(helpers.is_subtype(cls_b, cls_a)) self.assertFalse(helpers.is_supertype(cls_a, cls_b)) self.assertFalse(helpers.is_supertype(cls_b, cls_a)) def test_is_subtype_supertype_classes_no_type_ancestor(self): cls_a = builder.extract_node(''' class A(object): #@ pass ''') builtin_type = self._extract('type') self.assertFalse(helpers.is_supertype(builtin_type, cls_a)) self.assertFalse(helpers.is_subtype(cls_a, builtin_type)) def test_is_subtype_supertype_classes_metaclasses(self): cls_a = builder.extract_node(''' class A(type): #@ pass ''') builtin_type = self._extract('type') self.assertTrue(helpers.is_supertype(builtin_type, cls_a)) self.assertTrue(helpers.is_subtype(cls_a, builtin_type)) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_inference.py0000644000076500000240000046517113324063433023025 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2007 Marien Zwart # Copyright (c) 2013-2014 Google, Inc. 
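# A compact sketch of the helpers exercised at the end of the preceding test
# module: helpers.is_subtype()/is_supertype() compare two class nodes, and
# helpers.object_type() yields the class an expression evaluates to.  The
# class names and the helper name are illustrative only.
def _sketch_type_helpers():
    from astroid import builder, helpers

    cls_a, cls_b, instance_call = builder.extract_node('''
    class A(object): #@
        pass
    class B(A): #@
        pass
    B() #@
    ''')
    assert helpers.is_subtype(cls_b, cls_a)
    assert helpers.is_supertype(cls_a, cls_b)
    # object_type() of the call expression resolves to the class B itself.
    return helpers.object_type(instance_call)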
# Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Dmitry Pribysh # Copyright (c) 2015 Rene Zhang # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2017 Hugo # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017 Calen Pennington # Copyright (c) 2017 Calen Pennington # Copyright (c) 2017 David Euresti # Copyright (c) 2017 Derek Gustafson # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 Anthony Sottile # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """tests for the astroid inference capabilities """ # pylint: disable=too-many-lines import platform import sys from functools import partial import unittest from unittest.mock import patch import pytest from astroid import InferenceError, builder, nodes from astroid.builder import parse, extract_node from astroid.inference import infer_end as inference_infer_end from astroid.bases import Instance, BoundMethod, UnboundMethod,\ BUILTINS from astroid import arguments from astroid import decorators as decoratorsmod from astroid import exceptions from astroid import helpers from astroid import objects from astroid import test_utils from astroid import util from astroid.tests import resources def get_node_of_class(start_from, klass): return next(start_from.nodes_of_class(klass)) builder = builder.AstroidBuilder() if sys.version_info < (3, 0): EXC_MODULE = 'exceptions' BOOL_SPECIAL_METHOD = '__nonzero__' else: EXC_MODULE = BUILTINS BOOL_SPECIAL_METHOD = '__bool__' class InferenceUtilsTest(unittest.TestCase): def test_path_wrapper(self): def infer_default(self, *args): raise InferenceError infer_default = decoratorsmod.path_wrapper(infer_default) infer_end = decoratorsmod.path_wrapper(inference_infer_end) with self.assertRaises(InferenceError): next(infer_default(1)) self.assertEqual(next(infer_end(1)), 1) def _assertInferElts(node_type, self, node, elts): inferred = next(node.infer()) self.assertIsInstance(inferred, node_type) self.assertEqual(sorted(elt.value for elt in inferred.elts), elts) def partialmethod(func, arg): """similar to functools.partial but return a lambda instead of a class so returned value may be turned into a method. 
""" return lambda *args, **kwargs: func(arg, *args, **kwargs) class InferenceTest(resources.SysPathSetup, unittest.TestCase): # additional assertInfer* method for builtin types def assertInferConst(self, node, expected): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, expected) def assertInferDict(self, node, expected): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Dict) elts = {(key.value, value.value) for (key, value) in inferred.items} self.assertEqual(sorted(elts), sorted(expected.items())) assertInferTuple = partialmethod(_assertInferElts, nodes.Tuple) assertInferList = partialmethod(_assertInferElts, nodes.List) assertInferSet = partialmethod(_assertInferElts, nodes.Set) assertInferFrozenSet = partialmethod(_assertInferElts, objects.FrozenSet) CODE = ''' class C(object): "new style" attr = 4 def meth1(self, arg1, optarg=0): var = object() print ("yo", arg1, optarg) self.iattr = "hop" return var def meth2(self): self.meth1(*self.meth3) def meth3(self, d=attr): b = self.attr c = self.iattr return b, c ex = Exception("msg") v = C().meth1(1) m_unbound = C.meth1 m_bound = C().meth1 a, b, c = ex, 1, "bonjour" [d, e, f] = [ex, 1.0, ("bonjour", v)] g, h = f i, (j, k) = "glup", f a, b= b, a # Gasp ! ''' ast = parse(CODE, __name__) def test_infer_abstract_property_return_values(self): module = parse(''' import abc class A(object): @abc.abstractproperty def test(self): return 42 a = A() x = a.test ''') inferred = next(module['x'].infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) def test_module_inference(self): inferred = self.ast.infer() obj = next(inferred) self.assertEqual(obj.name, __name__) self.assertEqual(obj.root().name, __name__) self.assertRaises(StopIteration, partial(next, inferred)) def test_class_inference(self): inferred = self.ast['C'].infer() obj = next(inferred) self.assertEqual(obj.name, 'C') self.assertEqual(obj.root().name, __name__) self.assertRaises(StopIteration, partial(next, inferred)) def test_function_inference(self): inferred = self.ast['C']['meth1'].infer() obj = next(inferred) self.assertEqual(obj.name, 'meth1') self.assertEqual(obj.root().name, __name__) self.assertRaises(StopIteration, partial(next, inferred)) def test_builtin_name_inference(self): inferred = self.ast['C']['meth1']['var'].infer() var = next(inferred) self.assertEqual(var.name, 'object') self.assertEqual(var.root().name, BUILTINS) self.assertRaises(StopIteration, partial(next, inferred)) def test_tupleassign_name_inference(self): inferred = self.ast['a'].infer() exc = next(inferred) self.assertIsInstance(exc, Instance) self.assertEqual(exc.name, 'Exception') self.assertEqual(exc.root().name, EXC_MODULE) self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast['b'].infer() const = next(inferred) self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, 1) self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast['c'].infer() const = next(inferred) self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, "bonjour") self.assertRaises(StopIteration, partial(next, inferred)) def test_listassign_name_inference(self): inferred = self.ast['d'].infer() exc = next(inferred) self.assertIsInstance(exc, Instance) self.assertEqual(exc.name, 'Exception') self.assertEqual(exc.root().name, EXC_MODULE) self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast['e'].infer() const = next(inferred) 
self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, 1.0) self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast['f'].infer() const = next(inferred) self.assertIsInstance(const, nodes.Tuple) self.assertRaises(StopIteration, partial(next, inferred)) def test_advanced_tupleassign_name_inference1(self): inferred = self.ast['g'].infer() const = next(inferred) self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, "bonjour") self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast['h'].infer() var = next(inferred) self.assertEqual(var.name, 'object') self.assertEqual(var.root().name, BUILTINS) self.assertRaises(StopIteration, partial(next, inferred)) def test_advanced_tupleassign_name_inference2(self): inferred = self.ast['i'].infer() const = next(inferred) self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, "glup") self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast['j'].infer() const = next(inferred) self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, "bonjour") self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast['k'].infer() var = next(inferred) self.assertEqual(var.name, 'object') self.assertEqual(var.root().name, BUILTINS) self.assertRaises(StopIteration, partial(next, inferred)) def test_swap_assign_inference(self): inferred = self.ast.locals['a'][1].infer() const = next(inferred) self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, 1) self.assertRaises(StopIteration, partial(next, inferred)) inferred = self.ast.locals['b'][1].infer() exc = next(inferred) self.assertIsInstance(exc, Instance) self.assertEqual(exc.name, 'Exception') self.assertEqual(exc.root().name, EXC_MODULE) self.assertRaises(StopIteration, partial(next, inferred)) def test_getattr_inference1(self): inferred = self.ast['ex'].infer() exc = next(inferred) self.assertIsInstance(exc, Instance) self.assertEqual(exc.name, 'Exception') self.assertEqual(exc.root().name, EXC_MODULE) self.assertRaises(StopIteration, partial(next, inferred)) def test_getattr_inference2(self): inferred = get_node_of_class(self.ast['C']['meth2'], nodes.Attribute).infer() meth1 = next(inferred) self.assertEqual(meth1.name, 'meth1') self.assertEqual(meth1.root().name, __name__) self.assertRaises(StopIteration, partial(next, inferred)) def test_getattr_inference3(self): inferred = self.ast['C']['meth3']['b'].infer() const = next(inferred) self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, 4) self.assertRaises(StopIteration, partial(next, inferred)) def test_getattr_inference4(self): inferred = self.ast['C']['meth3']['c'].infer() const = next(inferred) self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, "hop") self.assertRaises(StopIteration, partial(next, inferred)) def test_callfunc_inference(self): inferred = self.ast['v'].infer() meth1 = next(inferred) self.assertIsInstance(meth1, Instance) self.assertEqual(meth1.name, 'object') self.assertEqual(meth1.root().name, BUILTINS) self.assertRaises(StopIteration, partial(next, inferred)) def test_unbound_method_inference(self): inferred = self.ast['m_unbound'].infer() meth1 = next(inferred) self.assertIsInstance(meth1, UnboundMethod) self.assertEqual(meth1.name, 'meth1') self.assertEqual(meth1.parent.frame().name, 'C') self.assertRaises(StopIteration, partial(next, inferred)) def test_bound_method_inference(self): inferred = self.ast['m_bound'].infer() meth1 = next(inferred) 
self.assertIsInstance(meth1, BoundMethod) self.assertEqual(meth1.name, 'meth1') self.assertEqual(meth1.parent.frame().name, 'C') self.assertRaises(StopIteration, partial(next, inferred)) def test_args_default_inference1(self): optarg = test_utils.get_name_node(self.ast['C']['meth1'], 'optarg') inferred = optarg.infer() obj1 = next(inferred) self.assertIsInstance(obj1, nodes.Const) self.assertEqual(obj1.value, 0) obj1 = next(inferred) self.assertIs(obj1, util.Uninferable, obj1) self.assertRaises(StopIteration, partial(next, inferred)) def test_args_default_inference2(self): inferred = self.ast['C']['meth3'].ilookup('d') obj1 = next(inferred) self.assertIsInstance(obj1, nodes.Const) self.assertEqual(obj1.value, 4) obj1 = next(inferred) self.assertIs(obj1, util.Uninferable, obj1) self.assertRaises(StopIteration, partial(next, inferred)) def test_inference_restrictions(self): inferred = test_utils.get_name_node(self.ast['C']['meth1'], 'arg1').infer() obj1 = next(inferred) self.assertIs(obj1, util.Uninferable, obj1) self.assertRaises(StopIteration, partial(next, inferred)) def test_ancestors_inference(self): code = ''' class A(object): #@ pass class A(A): #@ pass ''' a1, a2 = extract_node(code, __name__) a2_ancestors = list(a2.ancestors()) self.assertEqual(len(a2_ancestors), 2) self.assertIs(a2_ancestors[0], a1) def test_ancestors_inference2(self): code = ''' class A(object): #@ pass class B(A): #@ pass class A(B): #@ pass ''' a1, b, a2 = extract_node(code, __name__) a2_ancestors = list(a2.ancestors()) self.assertEqual(len(a2_ancestors), 3) self.assertIs(a2_ancestors[0], b) self.assertIs(a2_ancestors[1], a1) def test_f_arg_f(self): code = ''' def f(f=1): return f a = f() ''' ast = parse(code, __name__) a = ast['a'] a_inferred = a.inferred() self.assertEqual(a_inferred[0].value, 1) self.assertEqual(len(a_inferred), 1) def test_exc_ancestors(self): code = ''' def f(): raise __(NotImplementedError) ''' error = extract_node(code, __name__) nie = error.inferred()[0] self.assertIsInstance(nie, nodes.ClassDef) nie_ancestors = [c.name for c in nie.ancestors()] if sys.version_info < (3, 0): expected = ['RuntimeError', 'StandardError', 'Exception', 'BaseException', 'object'] self.assertEqual(nie_ancestors, expected) else: expected = ['RuntimeError', 'Exception', 'BaseException', 'object'] self.assertEqual(nie_ancestors, expected) def test_except_inference(self): code = ''' try: print (hop) except NameError as ex: ex1 = ex except Exception as ex: ex2 = ex raise ''' ast = parse(code, __name__) ex1 = ast['ex1'] ex1_infer = ex1.infer() ex1 = next(ex1_infer) self.assertIsInstance(ex1, Instance) self.assertEqual(ex1.name, 'NameError') self.assertRaises(StopIteration, partial(next, ex1_infer)) ex2 = ast['ex2'] ex2_infer = ex2.infer() ex2 = next(ex2_infer) self.assertIsInstance(ex2, Instance) self.assertEqual(ex2.name, 'Exception') self.assertRaises(StopIteration, partial(next, ex2_infer)) def test_del1(self): code = ''' del undefined_attr ''' delete = extract_node(code, __name__) self.assertRaises(InferenceError, delete.infer) def test_del2(self): code = ''' a = 1 b = a del a c = a a = 2 d = a ''' ast = parse(code, __name__) n = ast['b'] n_infer = n.infer() inferred = next(n_infer) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 1) self.assertRaises(StopIteration, partial(next, n_infer)) n = ast['c'] n_infer = n.infer() self.assertRaises(InferenceError, partial(next, n_infer)) n = ast['d'] n_infer = n.infer() inferred = next(n_infer) self.assertIsInstance(inferred, nodes.Const) 
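# (test_del2 continues below with the value check for ``d``.)
#
# A condensed version of the default-argument behaviour shown by
# test_args_default_inference1/2: inferring a parameter name inside the body
# yields its default value first, then Uninferable for the unknown call-time
# value.  The class and helper names are illustrative.
def _sketch_default_argument_inference():
    from astroid import builder, nodes, test_utils, util

    module = builder.parse('''
    class C(object):
        def meth1(self, arg1, optarg=0):
            return optarg
    ''')
    optarg = test_utils.get_name_node(module['C']['meth1'], 'optarg')
    inferred = optarg.infer()
    first = next(inferred)
    assert isinstance(first, nodes.Const) and first.value == 0
    assert next(inferred) is util.Uninferable
    return first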
self.assertEqual(inferred.value, 2) self.assertRaises(StopIteration, partial(next, n_infer)) def test_builtin_types(self): code = ''' l = [1] t = (2,) d = {} s = '' s2 = '_' ''' ast = parse(code, __name__) n = ast['l'] inferred = next(n.infer()) self.assertIsInstance(inferred, nodes.List) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.getitem(nodes.Const(0)).value, 1) self.assertIsInstance(inferred._proxied, nodes.ClassDef) self.assertEqual(inferred._proxied.name, 'list') self.assertIn('append', inferred._proxied.locals) n = ast['t'] inferred = next(n.infer()) self.assertIsInstance(inferred, nodes.Tuple) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.getitem(nodes.Const(0)).value, 2) self.assertIsInstance(inferred._proxied, nodes.ClassDef) self.assertEqual(inferred._proxied.name, 'tuple') n = ast['d'] inferred = next(n.infer()) self.assertIsInstance(inferred, nodes.Dict) self.assertIsInstance(inferred, Instance) self.assertIsInstance(inferred._proxied, nodes.ClassDef) self.assertEqual(inferred._proxied.name, 'dict') self.assertIn('get', inferred._proxied.locals) n = ast['s'] inferred = next(n.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'str') self.assertIn('lower', inferred._proxied.locals) n = ast['s2'] inferred = next(n.infer()) self.assertEqual(inferred.getitem(nodes.Const(0)).value, '_') code = 's = {1}' ast = parse(code, __name__) n = ast['s'] inferred = next(n.infer()) self.assertIsInstance(inferred, nodes.Set) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'set') self.assertIn('remove', inferred._proxied.locals) @test_utils.require_version(maxver='3.0') def test_unicode_type(self): code = '''u = u""''' ast = parse(code, __name__) n = ast['u'] inferred = next(n.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'unicode') self.assertIn('lower', inferred._proxied.locals) @unittest.expectedFailure def test_descriptor_are_callable(self): code = ''' class A: statm = staticmethod(open) clsm = classmethod('whatever') ''' ast = parse(code, __name__) statm = next(ast['A'].igetattr('statm')) self.assertTrue(statm.callable()) clsm = next(ast['A'].igetattr('clsm')) self.assertFalse(clsm.callable()) def test_bt_ancestor_crash(self): code = ''' class Warning(Warning): pass ''' ast = parse(code, __name__) w = ast['Warning'] ancestors = w.ancestors() ancestor = next(ancestors) self.assertEqual(ancestor.name, 'Warning') self.assertEqual(ancestor.root().name, EXC_MODULE) ancestor = next(ancestors) self.assertEqual(ancestor.name, 'Exception') self.assertEqual(ancestor.root().name, EXC_MODULE) ancestor = next(ancestors) self.assertEqual(ancestor.name, 'BaseException') self.assertEqual(ancestor.root().name, EXC_MODULE) ancestor = next(ancestors) self.assertEqual(ancestor.name, 'object') self.assertEqual(ancestor.root().name, BUILTINS) self.assertRaises(StopIteration, partial(next, ancestors)) def test_qqch(self): code = ''' from astroid.modutils import load_module_from_name xxx = load_module_from_name('__pkginfo__') ''' ast = parse(code, __name__) xxx = ast['xxx'] self.assertSetEqual({n.__class__ for n in xxx.inferred()}, {nodes.Const, util.Uninferable.__class__}) def test_method_argument(self): code = ''' class ErudiEntitySchema: """a entity has a type, a set of subject and or object relations""" def __init__(self, e_type, **kwargs): kwargs['e_type'] = 
e_type.capitalize().encode() def meth(self, e_type, *args, **kwargs): kwargs['e_type'] = e_type.capitalize().encode() print(args) ''' ast = parse(code, __name__) arg = test_utils.get_name_node(ast['ErudiEntitySchema']['__init__'], 'e_type') self.assertEqual([n.__class__ for n in arg.infer()], [util.Uninferable.__class__]) arg = test_utils.get_name_node(ast['ErudiEntitySchema']['__init__'], 'kwargs') self.assertEqual([n.__class__ for n in arg.infer()], [nodes.Dict]) arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'e_type') self.assertEqual([n.__class__ for n in arg.infer()], [util.Uninferable.__class__]) arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'args') self.assertEqual([n.__class__ for n in arg.infer()], [nodes.Tuple]) arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'kwargs') self.assertEqual([n.__class__ for n in arg.infer()], [nodes.Dict]) def test_tuple_then_list(self): code = ''' def test_view(rql, vid, tags=()): tags = list(tags) __(tags).append(vid) ''' name = extract_node(code, __name__) it = name.infer() tags = next(it) self.assertIsInstance(tags, nodes.List) self.assertEqual(tags.elts, []) with self.assertRaises(StopIteration): next(it) def test_mulassign_inference(self): code = ''' def first_word(line): """Return the first word of a line""" return line.split()[0] def last_word(line): """Return last word of a line""" return line.split()[-1] def process_line(word_pos): """Silly function: returns (ok, callable) based on argument. For test purpose only. """ if word_pos > 0: return (True, first_word) elif word_pos < 0: return (True, last_word) else: return (False, None) if __name__ == '__main__': line_number = 0 for a_line in file('test_callable.py'): tupletest = process_line(line_number) (ok, fct) = process_line(line_number) if ok: fct(a_line) ''' ast = parse(code, __name__) self.assertEqual(len(list(ast['process_line'].infer_call_result(None))), 3) self.assertEqual(len(list(ast['tupletest'].infer())), 3) values = ['>1', __name__, __file__) self._test_const_inferred(ast['a'], 23>>1) def test_binary_op_int_shiftleft(self): ast = builder.string_build('a = 23 <<1', __name__, __file__) self._test_const_inferred(ast['a'], 23<<1) def test_binary_op_other_type(self): ast_nodes = extract_node(''' class A: def __add__(self, other): return other + 42 A() + 1 #@ 1 + A() #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, nodes.Const) self.assertEqual(first.value, 43) second = next(ast_nodes[1].infer()) self.assertEqual(second, util.Uninferable) def test_binary_op_other_type_using_reflected_operands(self): ast_nodes = extract_node(''' class A(object): def __radd__(self, other): return other + 42 A() + 1 #@ 1 + A() #@ ''') first = next(ast_nodes[0].infer()) self.assertEqual(first, util.Uninferable) second = next(ast_nodes[1].infer()) self.assertIsInstance(second, nodes.Const) self.assertEqual(second.value, 43) def test_binary_op_reflected_and_not_implemented_is_type_error(self): ast_node = extract_node(''' class A(object): def __radd__(self, other): return NotImplemented 1 + A() #@ ''') first = next(ast_node.infer()) self.assertEqual(first, util.Uninferable) def test_binary_op_list_mul(self): for code in ('a = [[]] * 2', 'a = 2 * [[]]'): ast = builder.string_build(code, __name__, __file__) inferred = list(ast['a'].infer()) self.assertEqual(len(inferred), 1) self.assertIsInstance(inferred[0], nodes.List) self.assertEqual(len(inferred[0].elts), 2) self.assertIsInstance(inferred[0].elts[0], nodes.List) 
self.assertIsInstance(inferred[0].elts[1], nodes.List) def test_binary_op_list_mul_none(self): 'test correct handling on list multiplied by None' ast = builder.string_build('a = [1] * None\nb = [1] * "r"') inferred = ast['a'].inferred() self.assertEqual(len(inferred), 1) self.assertEqual(inferred[0], util.Uninferable) inferred = ast['b'].inferred() self.assertEqual(len(inferred), 1) self.assertEqual(inferred[0], util.Uninferable) def test_binary_op_list_mul_int(self): 'test correct handling on list multiplied by int when there are more than one' code = ''' from ctypes import c_int seq = [c_int()] * 4 ''' ast = parse(code, __name__) inferred = ast['seq'].inferred() self.assertEqual(len(inferred), 1) listval = inferred[0] self.assertIsInstance(listval, nodes.List) self.assertEqual(len(listval.itered()), 4) def test_binary_op_on_self(self): 'test correct handling of applying binary operator to self' code = ''' import sys sys.path = ['foo'] + sys.path sys.path.insert(0, 'bar') path = sys.path ''' ast = parse(code, __name__) inferred = ast['path'].inferred() self.assertIsInstance(inferred[0], nodes.List) def test_binary_op_tuple_add(self): ast = builder.string_build('a = (1,) + (2,)', __name__, __file__) inferred = list(ast['a'].infer()) self.assertEqual(len(inferred), 1) self.assertIsInstance(inferred[0], nodes.Tuple) self.assertEqual(len(inferred[0].elts), 2) self.assertEqual(inferred[0].elts[0].value, 1) self.assertEqual(inferred[0].elts[1].value, 2) def test_binary_op_custom_class(self): code = ''' class myarray: def __init__(self, array): self.array = array def __mul__(self, x): return myarray([2,4,6]) def astype(self): return "ASTYPE" def randint(maximum): if maximum is not None: return myarray([1,2,3]) * 2 else: return int(5) x = randint(1) ''' ast = parse(code, __name__) inferred = list(ast.igetattr('x')) self.assertEqual(len(inferred), 2) value = [str(v) for v in inferred] # The __name__ trick here makes it work when invoked directly # (__name__ == '__main__') and through pytest (__name__ == # 'unittest_inference') self.assertEqual(value, ['Instance of %s.myarray' % __name__, 'Const.int(value=5)']) def test_nonregr_lambda_arg(self): code = ''' def f(g = lambda: None): __(g()).x ''' callfuncnode = extract_node(code) inferred = list(callfuncnode.infer()) self.assertEqual(len(inferred), 2, inferred) inferred.remove(util.Uninferable) self.assertIsInstance(inferred[0], nodes.Const) self.assertIsNone(inferred[0].value) def test_nonregr_getitem_empty_tuple(self): code = ''' def f(x): a = ()[x] ''' ast = parse(code, __name__) inferred = list(ast['f'].ilookup('a')) self.assertEqual(len(inferred), 1) self.assertEqual(inferred[0], util.Uninferable) def test_nonregr_instance_attrs(self): """non regression for instance_attrs infinite loop : pylint / #4""" code = """ class Foo(object): def set_42(self): self.attr = 42 class Bar(Foo): def __init__(self): self.attr = 41 """ ast = parse(code, __name__) foo_class = ast['Foo'] bar_class = ast['Bar'] bar_self = ast['Bar']['__init__']['self'] assattr = bar_class.instance_attrs['attr'][0] self.assertEqual(len(foo_class.instance_attrs['attr']), 1) self.assertEqual(len(bar_class.instance_attrs['attr']), 1) self.assertEqual(bar_class.instance_attrs, {'attr': [assattr]}) # call 'instance_attr' via 'Instance.getattr' to trigger the bug: instance = bar_self.inferred()[0] instance.getattr('attr') self.assertEqual(len(bar_class.instance_attrs['attr']), 1) self.assertEqual(len(foo_class.instance_attrs['attr']), 1) self.assertEqual(bar_class.instance_attrs, {'attr': 
[assattr]}) def test_nonregr_multi_referential_addition(self): """Regression test for https://github.com/PyCQA/astroid/issues/483 Make sure issue where referring to the same variable in the same inferred expression caused an uninferable result. """ code = """ b = 1 a = b + b a #@ """ variable_a = extract_node(code) self.assertEqual(variable_a.inferred()[0].value, 2) @test_utils.require_version(minver='3.5') def test_nonregr_layed_dictunpack(self): """Regression test for https://github.com/PyCQA/astroid/issues/483 Make sure mutliple dictunpack references are inferable """ code = """ base = {'data': 0} new = {**base, 'data': 1} new3 = {**base, **new} new3 #@ """ ass = extract_node(code) self.assertIsInstance(ass.inferred()[0], nodes.Dict) def test_nonregr_inference_modifying_col_offset(self): """Make sure inference doesn't improperly modify col_offset Regression test for https://github.com/PyCQA/pylint/issues/1839 """ code = """ class F: def _(self): return type(self).f """ mod = parse(code) cdef = mod.body[0] call = cdef.body[0].body[0].value.expr orig_offset = cdef.col_offset call.inferred() self.assertEqual(cdef.col_offset, orig_offset) def test_python25_no_relative_import(self): ast = resources.build_file('data/package/absimport.py') self.assertTrue(ast.absolute_import_activated(), True) inferred = next(test_utils.get_name_node(ast, 'import_package_subpackage_module').infer()) # failed to import since absolute_import is activated self.assertIs(inferred, util.Uninferable) def test_nonregr_absolute_import(self): ast = resources.build_file('data/absimp/string.py', 'data.absimp.string') self.assertTrue(ast.absolute_import_activated(), True) inferred = next(test_utils.get_name_node(ast, 'string').infer()) self.assertIsInstance(inferred, nodes.Module) self.assertEqual(inferred.name, 'string') self.assertIn('ascii_letters', inferred.locals) def test_mechanize_open(self): try: import mechanize # pylint: disable=unused-variable except ImportError: self.skipTest('require mechanize installed') data = ''' from mechanize import Browser print(Browser) b = Browser() ''' ast = parse(data, __name__) browser = next(test_utils.get_name_node(ast, 'Browser').infer()) self.assertIsInstance(browser, nodes.ClassDef) bopen = list(browser.igetattr('open')) self.skipTest('the commit said: "huum, see that later"') self.assertEqual(len(bopen), 1) self.assertIsInstance(bopen[0], nodes.FunctionDef) self.assertTrue(bopen[0].callable()) b = next(test_utils.get_name_node(ast, 'b').infer()) self.assertIsInstance(b, Instance) bopen = list(b.igetattr('open')) self.assertEqual(len(bopen), 1) self.assertIsInstance(bopen[0], BoundMethod) self.assertTrue(bopen[0].callable()) def test_property(self): code = ''' from smtplib import SMTP class SendMailController(object): @property def smtp(self): return SMTP(mailhost, port) @property def me(self): return self my_smtp = SendMailController().smtp my_me = SendMailController().me ''' decorators = {'%s.property' % BUILTINS} ast = parse(code, __name__) self.assertEqual(ast['SendMailController']['smtp'].decoratornames(), decorators) propinferred = list(ast.body[2].value.infer()) self.assertEqual(len(propinferred), 1) propinferred = propinferred[0] self.assertIsInstance(propinferred, Instance) self.assertEqual(propinferred.name, 'SMTP') self.assertEqual(propinferred.root().name, 'smtplib') self.assertEqual(ast['SendMailController']['me'].decoratornames(), decorators) propinferred = list(ast.body[3].value.infer()) self.assertEqual(len(propinferred), 1) propinferred = propinferred[0] 
self.assertIsInstance(propinferred, Instance) self.assertEqual(propinferred.name, 'SendMailController') self.assertEqual(propinferred.root().name, __name__) def test_im_func_unwrap(self): code = ''' class EnvBasedTC: def pactions(self): pass pactions = EnvBasedTC.pactions.im_func print (pactions) class EnvBasedTC2: pactions = EnvBasedTC.pactions.im_func print (pactions) ''' ast = parse(code, __name__) pactions = test_utils.get_name_node(ast, 'pactions') inferred = list(pactions.infer()) self.assertEqual(len(inferred), 1) self.assertIsInstance(inferred[0], nodes.FunctionDef) pactions = test_utils.get_name_node(ast['EnvBasedTC2'], 'pactions') inferred = list(pactions.infer()) self.assertEqual(len(inferred), 1) self.assertIsInstance(inferred[0], nodes.FunctionDef) def test_augassign(self): code = ''' a = 1 a += 2 print (a) ''' ast = parse(code, __name__) inferred = list(test_utils.get_name_node(ast, 'a').infer()) self.assertEqual(len(inferred), 1) self.assertIsInstance(inferred[0], nodes.Const) self.assertEqual(inferred[0].value, 3) def test_nonregr_func_arg(self): code = ''' def foo(self, bar): def baz(): pass def qux(): return baz spam = bar(None, qux) print (spam) ''' ast = parse(code, __name__) inferred = list(test_utils.get_name_node(ast['foo'], 'spam').infer()) self.assertEqual(len(inferred), 1) self.assertIs(inferred[0], util.Uninferable) def test_nonregr_func_global(self): code = ''' active_application = None def get_active_application(): global active_application return active_application class Application(object): def __init__(self): global active_application active_application = self class DataManager(object): def __init__(self, app=None): self.app = get_active_application() def test(self): p = self.app print (p) ''' ast = parse(code, __name__) inferred = list(Instance(ast['DataManager']).igetattr('app')) self.assertEqual(len(inferred), 2, inferred) # None / Instance(Application) inferred = list(test_utils.get_name_node(ast['DataManager']['test'], 'p').infer()) self.assertEqual(len(inferred), 2, inferred) for node in inferred: if isinstance(node, Instance) and node.name == 'Application': break else: self.fail('expected to find an instance of Application in %s' % inferred) def test_list_inference(self): """#20464""" code = ''' from unknown import Unknown A = [] B = [] def test(): xyz = [ Unknown ] + A + B return xyz Z = test() ''' ast = parse(code, __name__) inferred = next(ast['Z'].infer()) self.assertIsInstance(inferred, nodes.List) self.assertEqual(len(inferred.elts), 1) self.assertIsInstance(inferred.elts[0], nodes.Unknown) def test__new__(self): code = ''' class NewTest(object): "doc" def __new__(cls, arg): self = object.__new__(cls) self.arg = arg return self n = NewTest() ''' ast = parse(code, __name__) self.assertRaises(InferenceError, list, ast['NewTest'].igetattr('arg')) n = next(ast['n'].infer()) inferred = list(n.igetattr('arg')) self.assertEqual(len(inferred), 1, inferred) def test__new__bound_methods(self): node = extract_node(''' class cls(object): pass cls().__new__(cls) #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred._proxied, node.root()['cls']) def test_two_parents_from_same_module(self): code = ''' from data import nonregr class Xxx(nonregr.Aaa, nonregr.Ccc): "doc" ''' ast = parse(code, __name__) parents = list(ast['Xxx'].ancestors()) self.assertEqual(len(parents), 3, parents) # Aaa, Ccc, object def test_pluggable_inference(self): code = ''' from collections import namedtuple A = namedtuple('A', ['a', 
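# ---------------------------------------------------------------------------
# A minimal sketch, assuming the public astroid API, of the augmented
# assignment folding that test_augassign asserts above; names here are local
# to this snippet.
import astroid

_aug_node = astroid.extract_node('''
a = 1
a += 2
a  #@
''')
_aug_inferred = next(_aug_node.infer())
# `a` accumulates to the constant 3.
assert isinstance(_aug_inferred, astroid.nodes.Const)
assert _aug_inferred.value == 3
# ---------------------------------------------------------------------------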
'b']) B = namedtuple('B', 'a b') ''' ast = parse(code, __name__) aclass = ast['A'].inferred()[0] self.assertIsInstance(aclass, nodes.ClassDef) self.assertIn('a', aclass.instance_attrs) self.assertIn('b', aclass.instance_attrs) bclass = ast['B'].inferred()[0] self.assertIsInstance(bclass, nodes.ClassDef) self.assertIn('a', bclass.instance_attrs) self.assertIn('b', bclass.instance_attrs) def test_infer_arguments(self): code = ''' class A(object): def first(self, arg1, arg2): return arg1 @classmethod def method(cls, arg1, arg2): return arg2 @classmethod def empty(cls): return 2 @staticmethod def static(arg1, arg2): return arg1 def empty_method(self): return [] x = A().first(1, []) y = A.method(1, []) z = A.static(1, []) empty = A.empty() empty_list = A().empty_method() ''' ast = parse(code, __name__) int_node = ast['x'].inferred()[0] self.assertIsInstance(int_node, nodes.Const) self.assertEqual(int_node.value, 1) list_node = ast['y'].inferred()[0] self.assertIsInstance(list_node, nodes.List) int_node = ast['z'].inferred()[0] self.assertIsInstance(int_node, nodes.Const) self.assertEqual(int_node.value, 1) empty = ast['empty'].inferred()[0] self.assertIsInstance(empty, nodes.Const) self.assertEqual(empty.value, 2) empty_list = ast['empty_list'].inferred()[0] self.assertIsInstance(empty_list, nodes.List) def test_infer_variable_arguments(self): code = ''' def test(*args, **kwargs): vararg = args kwarg = kwargs ''' ast = parse(code, __name__) func = ast['test'] vararg = func.body[0].value kwarg = func.body[1].value kwarg_inferred = kwarg.inferred()[0] self.assertIsInstance(kwarg_inferred, nodes.Dict) self.assertIs(kwarg_inferred.parent, func.args) vararg_inferred = vararg.inferred()[0] self.assertIsInstance(vararg_inferred, nodes.Tuple) self.assertIs(vararg_inferred.parent, func.args) def test_infer_nested(self): code = """ def nested(): from threading import Thread class NestedThread(Thread): def __init__(self): Thread.__init__(self) """ # Test that inferring Thread.__init__ looks up in # the nested scope. ast = parse(code, __name__) callfunc = next(ast.nodes_of_class(nodes.Call)) func = callfunc.func inferred = func.inferred()[0] self.assertIsInstance(inferred, UnboundMethod) def test_instance_binary_operations(self): code = """ class A(object): def __mul__(self, other): return 42 a = A() b = A() sub = a - b mul = a * b """ ast = parse(code, __name__) sub = ast['sub'].inferred()[0] mul = ast['mul'].inferred()[0] self.assertIs(sub, util.Uninferable) self.assertIsInstance(mul, nodes.Const) self.assertEqual(mul.value, 42) def test_instance_binary_operations_parent(self): code = """ class A(object): def __mul__(self, other): return 42 class B(A): pass a = B() b = B() sub = a - b mul = a * b """ ast = parse(code, __name__) sub = ast['sub'].inferred()[0] mul = ast['mul'].inferred()[0] self.assertIs(sub, util. 
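# ---------------------------------------------------------------------------
# A minimal sketch, assuming the default brain plugins are loaded: namedtuple
# calls resolve to synthesized classes, as test_pluggable_inference asserts
# above.  The `Point` name is purely illustrative.
import astroid

_nt_node = astroid.extract_node('''
from collections import namedtuple
Point = namedtuple('Point', ['x', 'y'])
Point  #@
''')
_nt_inferred = next(_nt_node.infer())
# The call result is modelled as a real class with the declared attributes.
assert isinstance(_nt_inferred, astroid.nodes.ClassDef)
assert 'x' in _nt_inferred.instance_attrs and 'y' in _nt_inferred.instance_attrs
# ---------------------------------------------------------------------------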
Uninferable) self.assertIsInstance(mul, nodes.Const) self.assertEqual(mul.value, 42) def test_instance_binary_operations_multiple_methods(self): code = """ class A(object): def __mul__(self, other): return 42 class B(A): def __mul__(self, other): return [42] a = B() b = B() sub = a - b mul = a * b """ ast = parse(code, __name__) sub = ast['sub'].inferred()[0] mul = ast['mul'].inferred()[0] self.assertIs(sub, util.Uninferable) self.assertIsInstance(mul, nodes.List) self.assertIsInstance(mul.elts[0], nodes.Const) self.assertEqual(mul.elts[0].value, 42) def test_infer_call_result_crash(self): code = """ class A(object): def __mul__(self, other): return type.__new__() a = A() b = A() c = a * b """ ast = parse(code, __name__) node = ast['c'] self.assertEqual(node.inferred(), [util.Uninferable]) def test_infer_empty_nodes(self): # Should not crash when trying to infer EmptyNodes. node = nodes.EmptyNode() self.assertEqual(node.inferred(), [util.Uninferable]) def test_infinite_loop_for_decorators(self): # Issue https://bitbucket.org/logilab/astroid/issue/50 # A decorator that returns itself leads to an infinite loop. code = """ def decorator(): def wrapper(): return decorator() return wrapper @decorator() def do_a_thing(): pass """ ast = parse(code, __name__) node = ast['do_a_thing'] self.assertEqual(node.type, 'function') def test_no_infinite_ancestor_loop(self): klass = extract_node(""" import datetime def method(self): datetime.datetime = something() class something(datetime.datetime): #@ pass """) self.assertIn( 'object', [base.name for base in klass.ancestors()]) def test_stop_iteration_leak(self): code = """ class Test: def __init__(self): self.config = {0: self.config[0]} self.config[0].test() #@ """ ast = extract_node(code, __name__) expr = ast.func.expr self.assertRaises(InferenceError, next, expr.infer()) def test_tuple_builtin_inference(self): code = """ var = (1, 2) tuple() #@ tuple([1]) #@ tuple({2}) #@ tuple("abc") #@ tuple({1: 2}) #@ tuple(var) #@ tuple(tuple([1])) #@ tuple(frozenset((1, 2))) #@ tuple(None) #@ tuple(1) #@ tuple(1, 2) #@ """ ast = extract_node(code, __name__) self.assertInferTuple(ast[0], []) self.assertInferTuple(ast[1], [1]) self.assertInferTuple(ast[2], [2]) self.assertInferTuple(ast[3], ["a", "b", "c"]) self.assertInferTuple(ast[4], [1]) self.assertInferTuple(ast[5], [1, 2]) self.assertInferTuple(ast[6], [1]) self.assertInferTuple(ast[7], [1, 2]) for node in ast[8:]: inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.qname(), "{}.tuple".format(BUILTINS)) @test_utils.require_version('3.5') def test_starred_in_tuple_literal(self): code = """ var = (1, 2, 3) bar = (5, 6, 7) foo = [999, 1000, 1001] (0, *var) #@ (0, *var, 4) #@ (0, *var, 4, *bar) #@ (0, *var, 4, *(*bar, 8)) #@ (0, *var, 4, *(*bar, *foo)) #@ """ ast = extract_node(code, __name__) self.assertInferTuple(ast[0], [0, 1, 2, 3]) self.assertInferTuple(ast[1], [0, 1, 2, 3, 4]) self.assertInferTuple(ast[2], [0, 1, 2, 3, 4, 5, 6, 7]) self.assertInferTuple(ast[3], [0, 1, 2, 3, 4, 5, 6, 7, 8]) self.assertInferTuple(ast[4], [0, 1, 2, 3, 4, 5, 6, 7, 999, 1000, 1001]) @test_utils.require_version('3.5') def test_starred_in_list_literal(self): code = """ var = (1, 2, 3) bar = (5, 6, 7) foo = [999, 1000, 1001] [0, *var] #@ [0, *var, 4] #@ [0, *var, 4, *bar] #@ [0, *var, 4, *[*bar, 8]] #@ [0, *var, 4, *[*bar, *foo]] #@ """ ast = extract_node(code, __name__) self.assertInferList(ast[0], [0, 1, 2, 3]) self.assertInferList(ast[1], [0, 1, 2, 3, 4]) self.assertInferList(ast[2], 
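# ---------------------------------------------------------------------------
# A minimal sketch, assuming the public astroid API: binary operators on
# instances dispatch to the matching dunder methods, mirroring the
# instance-binary-operation assertions above.
import astroid

_mul, _sub = astroid.extract_node('''
class A(object):
    def __mul__(self, other):
        return 42
A() * A()  #@
A() - A()  #@
''')
# __mul__ is defined, so the product infers to the constant 42 ...
assert next(_mul.infer()).value == 42
# ... while there is no __sub__, so the difference stays Uninferable.
assert next(_sub.infer()) == astroid.Uninferable
# ---------------------------------------------------------------------------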
[0, 1, 2, 3, 4, 5, 6, 7]) self.assertInferList(ast[3], [0, 1, 2, 3, 4, 5, 6, 7, 8]) self.assertInferList(ast[4], [0, 1, 2, 3, 4, 5, 6, 7, 999, 1000, 1001]) @test_utils.require_version('3.5') def test_starred_in_set_literal(self): code = """ var = (1, 2, 3) bar = (5, 6, 7) foo = [999, 1000, 1001] {0, *var} #@ {0, *var, 4} #@ {0, *var, 4, *bar} #@ {0, *var, 4, *{*bar, 8}} #@ {0, *var, 4, *{*bar, *foo}} #@ """ ast = extract_node(code, __name__) self.assertInferSet(ast[0], [0, 1, 2, 3]) self.assertInferSet(ast[1], [0, 1, 2, 3, 4]) self.assertInferSet(ast[2], [0, 1, 2, 3, 4, 5, 6, 7]) self.assertInferSet(ast[3], [0, 1, 2, 3, 4, 5, 6, 7, 8]) self.assertInferSet(ast[4], [0, 1, 2, 3, 4, 5, 6, 7, 999, 1000, 1001]) @test_utils.require_version('3.5') def test_starred_in_literals_inference_issues(self): code = """ {0, *var} #@ {0, *var, 4} #@ {0, *var, 4, *bar} #@ {0, *var, 4, *{*bar, 8}} #@ {0, *var, 4, *{*bar, *foo}} #@ """ ast = extract_node(code, __name__) for node in ast: with self.assertRaises(InferenceError): next(node.infer()) @test_utils.require_version('3.5') def test_starred_in_mapping_literal(self): code = """ var = {1: 'b', 2: 'c'} bar = {4: 'e', 5: 'f'} {0: 'a', **var} #@ {0: 'a', **var, 3: 'd'} #@ {0: 'a', **var, 3: 'd', **{**bar, 6: 'g'}} #@ """ ast = extract_node(code, __name__) self.assertInferDict(ast[0], {0: 'a', 1: 'b', 2: 'c'}) self.assertInferDict(ast[1], {0: 'a', 1: 'b', 2: 'c', 3: 'd'}) self.assertInferDict(ast[2], {0: 'a', 1: 'b', 2: 'c', 3: 'd', 4: 'e', 5: 'f', 6: 'g'}) @test_utils.require_version('3.5') def test_starred_in_mapping_literal_no_inference_possible(self): node = extract_node(''' from unknown import unknown def test(a): return a + 1 def func(): a = {unknown: 'a'} return {0: 1, **a} test(**func()) ''') self.assertEqual(next(node.infer()), util.Uninferable) @test_utils.require_version('3.5') def test_starred_in_mapping_inference_issues(self): code = """ {0: 'a', **var} #@ {0: 'a', **var, 3: 'd'} #@ {0: 'a', **var, 3: 'd', **{**bar, 6: 'g'}} #@ """ ast = extract_node(code, __name__) for node in ast: with self.assertRaises(InferenceError): next(node.infer()) @test_utils.require_version('3.5') def test_starred_in_mapping_literal_non_const_keys_values(self): code = """ a, b, c, d, e, f, g, h, i, j = "ABCDEFGHIJ" var = {c: d, e: f} bar = {i: j} {a: b, **var} #@ {a: b, **var, **{g: h, **bar}} #@ """ ast = extract_node(code, __name__) self.assertInferDict(ast[0], {"A": "B", "C": "D", "E": "F"}) self.assertInferDict(ast[1], {"A": "B", "C": "D", "E": "F", "G": "H", "I": "J"}) def test_frozenset_builtin_inference(self): code = """ var = (1, 2) frozenset() #@ frozenset([1, 2, 1]) #@ frozenset({2, 3, 1}) #@ frozenset("abcab") #@ frozenset({1: 2}) #@ frozenset(var) #@ frozenset(tuple([1])) #@ frozenset(set(tuple([4, 5, set([2])]))) #@ frozenset(None) #@ frozenset(1) #@ frozenset(1, 2) #@ """ ast = extract_node(code, __name__) self.assertInferFrozenSet(ast[0], []) self.assertInferFrozenSet(ast[1], [1, 2]) self.assertInferFrozenSet(ast[2], [1, 2, 3]) self.assertInferFrozenSet(ast[3], ["a", "b", "c"]) self.assertInferFrozenSet(ast[4], [1]) self.assertInferFrozenSet(ast[5], [1, 2]) self.assertInferFrozenSet(ast[6], [1]) for node in ast[7:]: inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.qname(), "{}.frozenset".format(BUILTINS)) def test_set_builtin_inference(self): code = """ var = (1, 2) set() #@ set([1, 2, 1]) #@ set({2, 3, 1}) #@ set("abcab") #@ set({1: 2}) #@ set(var) #@ set(tuple([1])) #@ set(set(tuple([4, 5, set([2])]))) #@ 
set(None) #@ set(1) #@ set(1, 2) #@ """ ast = extract_node(code, __name__) self.assertInferSet(ast[0], []) self.assertInferSet(ast[1], [1, 2]) self.assertInferSet(ast[2], [1, 2, 3]) self.assertInferSet(ast[3], ["a", "b", "c"]) self.assertInferSet(ast[4], [1]) self.assertInferSet(ast[5], [1, 2]) self.assertInferSet(ast[6], [1]) for node in ast[7:]: inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.qname(), "{}.set".format(BUILTINS)) def test_list_builtin_inference(self): code = """ var = (1, 2) list() #@ list([1, 2, 1]) #@ list({2, 3, 1}) #@ list("abcab") #@ list({1: 2}) #@ list(var) #@ list(tuple([1])) #@ list(list(tuple([4, 5, list([2])]))) #@ list(None) #@ list(1) #@ list(1, 2) #@ """ ast = extract_node(code, __name__) self.assertInferList(ast[0], []) self.assertInferList(ast[1], [1, 1, 2]) self.assertInferList(ast[2], [1, 2, 3]) self.assertInferList(ast[3], ["a", "a", "b", "b", "c"]) self.assertInferList(ast[4], [1]) self.assertInferList(ast[5], [1, 2]) self.assertInferList(ast[6], [1]) for node in ast[7:]: inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.qname(), "{}.list".format(BUILTINS)) def test_conversion_of_dict_methods(self): ast_nodes = extract_node(''' list({1:2, 2:3}.values()) #@ list({1:2, 2:3}.keys()) #@ tuple({1:2, 2:3}.values()) #@ tuple({1:2, 3:4}.keys()) #@ set({1:2, 2:4}.keys()) #@ ''') self.assertInferList(ast_nodes[0], [2, 3]) self.assertInferList(ast_nodes[1], [1, 2]) self.assertInferTuple(ast_nodes[2], [2, 3]) self.assertInferTuple(ast_nodes[3], [1, 3]) self.assertInferSet(ast_nodes[4], [1, 2]) @test_utils.require_version('3.0') def test_builtin_inference_py3k(self): code = """ list(b"abc") #@ tuple(b"abc") #@ set(b"abc") #@ """ ast = extract_node(code, __name__) self.assertInferList(ast[0], [97, 98, 99]) self.assertInferTuple(ast[1], [97, 98, 99]) self.assertInferSet(ast[2], [97, 98, 99]) def test_dict_inference(self): code = """ dict() #@ dict(a=1, b=2, c=3) #@ dict([(1, 2), (2, 3)]) #@ dict([[1, 2], [2, 3]]) #@ dict([(1, 2), [2, 3]]) #@ dict([('a', 2)], b=2, c=3) #@ dict({1: 2}) #@ dict({'c': 2}, a=4, b=5) #@ def func(): return dict(a=1, b=2) func() #@ var = {'x': 2, 'y': 3} dict(var, a=1, b=2) #@ dict([1, 2, 3]) #@ dict([(1, 2), (1, 2, 3)]) #@ dict({1: 2}, {1: 2}) #@ dict({1: 2}, (1, 2)) #@ dict({1: 2}, (1, 2), a=4) #@ dict([(1, 2), ([4, 5], 2)]) #@ dict([None, None]) #@ def using_unknown_kwargs(**kwargs): return dict(**kwargs) using_unknown_kwargs(a=1, b=2) #@ """ ast = extract_node(code, __name__) self.assertInferDict(ast[0], {}) self.assertInferDict(ast[1], {'a': 1, 'b': 2, 'c': 3}) for i in range(2, 5): self.assertInferDict(ast[i], {1: 2, 2: 3}) self.assertInferDict(ast[5], {'a': 2, 'b': 2, 'c': 3}) self.assertInferDict(ast[6], {1: 2}) self.assertInferDict(ast[7], {'c': 2, 'a': 4, 'b': 5}) self.assertInferDict(ast[8], {'a': 1, 'b': 2}) self.assertInferDict(ast[9], {'x': 2, 'y': 3, 'a': 1, 'b': 2}) for node in ast[10:]: inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.qname(), "{}.dict".format(BUILTINS)) def test_dict_inference_kwargs(self): ast_node = extract_node('''dict(a=1, b=2, **{'c': 3})''') self.assertInferDict(ast_node, {'a': 1, 'b': 2, 'c': 3}) @test_utils.require_version('3.5') def test_dict_inference_for_multiple_starred(self): pairs = [ ('dict(a=1, **{"b": 2}, **{"c":3})', {'a':1, 'b':2, 'c':3}), ('dict(a=1, **{"b": 2}, d=4, **{"c":3})', {'a':1, 'b':2, 'c':3, 'd':4}), ('dict({"a":1}, b=2, **{"c":3})', 
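# ---------------------------------------------------------------------------
# A minimal sketch, assuming the public astroid API: a dict() builtin call
# with fully known arguments infers to a literal Dict node, as the
# dict-inference cases above assert.
import astroid

_dict_node = astroid.extract_node("dict(a=1, b=2)  #@")
_dict_inferred = next(_dict_node.infer())
assert isinstance(_dict_inferred, astroid.nodes.Dict)
# Dict.items holds (key node, value node) pairs.
assert {k.value: v.value for k, v in _dict_inferred.items} == {'a': 1, 'b': 2}
# ---------------------------------------------------------------------------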
{'a':1, 'b':2, 'c':3}), ] for code, expected_value in pairs: node = extract_node(code) self.assertInferDict(node, expected_value) @test_utils.require_version('3.5') def test_dict_inference_unpack_repeated_key(self): """Make sure astroid does not infer repeated keys in a dictionary Regression test for https://github.com/PyCQA/pylint/issues/1843 """ code = """ base = {'data': 0} new = {**base, 'data': 1} #@ new2 = {'data': 1, **base} #@ # Make sure overwrite works a = 'd' + 'ata' b3 = {**base, a: 3} #@ Make sure keys are properly inferred b4 = {a: 3, **base} #@ """ ast = extract_node(code) final_values = ( "{'data': 1}", "{'data': 0}", "{'data': 3}", "{'data': 0}", ) for node, final_value in zip(ast, final_values): assert node.targets[0].inferred()[0].as_string() == final_value def test_dict_invalid_args(self): invalid_values = [ 'dict(*1)', 'dict(**lala)', 'dict(**[])', ] for invalid in invalid_values: ast_node = extract_node(invalid) inferred = next(ast_node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.qname(), "{}.dict".format(BUILTINS)) def test_str_methods(self): code = """ ' '.decode() #@ ' '.encode() #@ ' '.join('abcd') #@ ' '.replace('a', 'b') #@ ' '.format('a') #@ ' '.capitalize() #@ ' '.title() #@ ' '.lower() #@ ' '.upper() #@ ' '.swapcase() #@ ' '.strip() #@ ' '.rstrip() #@ ' '.lstrip() #@ ' '.rjust() #@ ' '.ljust() #@ ' '.center() #@ ' '.index() #@ ' '.find() #@ ' '.count() #@ """ ast = extract_node(code, __name__) self.assertInferConst(ast[0], '') for i in range(1, 16): self.assertInferConst(ast[i], '') for i in range(16, 19): self.assertInferConst(ast[i], 0) def test_unicode_methods(self): code = """ u' '.encode() #@ u' '.decode() #@ u' '.join('abcd') #@ u' '.replace('a', 'b') #@ u' '.format('a') #@ u' '.capitalize() #@ u' '.title() #@ u' '.lower() #@ u' '.upper() #@ u' '.swapcase() #@ u' '.strip() #@ u' '.rstrip() #@ u' '.lstrip() #@ u' '.rjust() #@ u' '.ljust() #@ u' '.center() #@ u' '.index() #@ u' '.find() #@ u' '.count() #@ """ ast = extract_node(code, __name__) self.assertInferConst(ast[0], '') for i in range(1, 16): self.assertInferConst(ast[i], '') for i in range(16, 19): self.assertInferConst(ast[i], 0) def test_scope_lookup_same_attributes(self): code = ''' import collections class Second(collections.Counter): def collections(self): return "second" ''' ast = parse(code, __name__) bases = ast['Second'].bases[0] inferred = next(bases.infer()) self.assertTrue(inferred) self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.qname(), 'collections.Counter') def test_inferring_with_statement_failures(self): module = parse(''' class NoEnter(object): pass class NoMethod(object): __enter__ = None class NoElts(object): def __enter__(self): return 42 with NoEnter() as no_enter: pass with NoMethod() as no_method: pass with NoElts() as (no_elts, no_elts1): pass ''') self.assertRaises(InferenceError, next, module['no_enter'].infer()) self.assertRaises(InferenceError, next, module['no_method'].infer()) self.assertRaises(InferenceError, next, module['no_elts'].infer()) def test_inferring_with_statement(self): module = parse(''' class SelfContext(object): def __enter__(self): return self class OtherContext(object): def __enter__(self): return SelfContext() class MultipleReturns(object): def __enter__(self): return SelfContext(), OtherContext() class MultipleReturns2(object): def __enter__(self): return [1, [2, 3]] with SelfContext() as self_context: pass with OtherContext() as other_context: pass with MultipleReturns(), OtherContext() 
as multiple_with: pass with MultipleReturns2() as (stdout, (stderr, stdin)): pass ''') self_context = module['self_context'] inferred = next(self_context.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'SelfContext') other_context = module['other_context'] inferred = next(other_context.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'SelfContext') multiple_with = module['multiple_with'] inferred = next(multiple_with.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'SelfContext') stdout = module['stdout'] inferred = next(stdout.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 1) stderr = module['stderr'] inferred = next(stderr.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 2) def test_inferring_with_contextlib_contextmanager(self): module = parse(''' import contextlib from contextlib import contextmanager @contextlib.contextmanager def manager_none(): try: yield finally: pass @contextlib.contextmanager def manager_something(): try: yield 42 yield 24 # This should be ignored. finally: pass @contextmanager def manager_multiple(): with manager_none() as foo: with manager_something() as bar: yield foo, bar with manager_none() as none: pass with manager_something() as something: pass with manager_multiple() as (first, second): pass ''') none = module['none'] inferred = next(none.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertIsNone(inferred.value) something = module['something'] inferred = something.inferred() self.assertEqual(len(inferred), 1) inferred = inferred[0] self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) first, second = module['first'], module['second'] first = next(first.infer()) second = next(second.infer()) self.assertIsInstance(first, nodes.Const) self.assertIsNone(first.value) self.assertIsInstance(second, nodes.Const) self.assertEqual(second.value, 42) def test_inferring_context_manager_skip_index_error(self): # Raise an InferenceError when having multiple 'as' bindings # from a context manager, but its result doesn't have those # indices. This is the case of contextlib.nested, where the # result is a list, which is mutated later on, so it's # undetected by astroid. 
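# ---------------------------------------------------------------------------
# A minimal sketch, assuming the public astroid API: a `with` target bound
# from a @contextmanager generator infers to the yielded value, as the
# contextlib cases above assert.
import astroid

_cm_module = astroid.parse('''
from contextlib import contextmanager

@contextmanager
def manager():
    yield 42

with manager() as value:
    pass
''')
_cm_value = next(_cm_module['value'].infer())
assert isinstance(_cm_value, astroid.nodes.Const) and _cm_value.value == 42
# ---------------------------------------------------------------------------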
module = parse(''' class Manager(object): def __enter__(self): return [] with Manager() as (a, b, c): pass ''') self.assertRaises(InferenceError, next, module['a'].infer()) def test_inferring_context_manager_unpacking_inference_error(self): # https://github.com/PyCQA/pylint/issues/1463 module = parse(''' import contextlib @contextlib.contextmanager def _select_source(a=None): with _select_source() as result: yield result result = _select_source() with result as (a, b, c): pass ''') self.assertRaises(InferenceError, next, module['a'].infer()) def test_inferring_with_contextlib_contextmanager_failures(self): module = parse(''' from contextlib import contextmanager def no_decorators_mgr(): yield @no_decorators_mgr def other_decorators_mgr(): yield @contextmanager def no_yield_mgr(): pass with no_decorators_mgr() as no_decorators: pass with other_decorators_mgr() as other_decorators: pass with no_yield_mgr() as no_yield: pass ''') self.assertRaises(InferenceError, next, module['no_decorators'].infer()) self.assertRaises(InferenceError, next, module['other_decorators'].infer()) self.assertRaises(InferenceError, next, module['no_yield'].infer()) def test_nested_contextmanager(self): """Make sure contextmanager works with nested functions Previously contextmanager would retrieve the first yield instead of the yield in the proper scope Fixes https://github.com/PyCQA/pylint/issues/1746 """ code = """ from contextlib import contextmanager @contextmanager def outer(): @contextmanager def inner(): yield 2 yield inner with outer() as ctx: ctx #@ with ctx() as val: val #@ """ context_node, value_node = extract_node(code) value = next(value_node.infer()) context = next(context_node.infer()) assert isinstance(context, nodes.FunctionDef) assert isinstance(value, nodes.Const) def test_unary_op_leaks_stop_iteration(self): node = extract_node('+[] #@') self.assertEqual(util.Uninferable, next(node.infer())) def test_unary_operands(self): ast_nodes = extract_node(''' import os def func(): pass from missing import missing class GoodInstance(object): def __pos__(self): return 42 def __neg__(self): return +self - 41 def __invert__(self): return 42 class BadInstance(object): def __pos__(self): return lala def __neg__(self): return missing class LambdaInstance(object): __pos__ = lambda self: self.lala __neg__ = lambda self: self.lala + 1 @property def lala(self): return 24 instance = GoodInstance() lambda_instance = LambdaInstance() +instance #@ -instance #@ ~instance #@ --instance #@ +lambda_instance #@ -lambda_instance #@ bad_instance = BadInstance() +bad_instance #@ -bad_instance #@ ~bad_instance #@ # These should be TypeErrors. 
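# ---------------------------------------------------------------------------
# A minimal sketch, assuming the public astroid API: unary operators consult
# the matching dunder (__pos__/__neg__/__invert__), as the unary-operand
# cases above exercise.  The `Number` class is purely illustrative.
import astroid

_pos_node = astroid.extract_node('''
class Number(object):
    def __pos__(self):
        return 42
+Number()  #@
''')
assert next(_pos_node.infer()).value == 42
# ---------------------------------------------------------------------------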
~BadInstance #@ ~os #@ -func #@ +BadInstance #@ ''') expected = [42, 1, 42, -1, 24, 25] for node, value in zip(ast_nodes[:6], expected): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, value) for bad_node in ast_nodes[6:]: inferred = next(bad_node.infer()) self.assertEqual(inferred, util.Uninferable) def test_unary_op_instance_method_not_callable(self): ast_node = extract_node(''' class A: __pos__ = (i for i in range(10)) +A() #@ ''') self.assertRaises(InferenceError, next, ast_node.infer()) def test_binary_op_type_errors(self): ast_nodes = extract_node(''' import collections 1 + "a" #@ 1 - [] #@ 1 * {} #@ 1 / collections #@ 1 ** (lambda x: x) #@ {} * {} #@ {} - {} #@ {} | {} #@ {} >> {} #@ [] + () #@ () + [] #@ [] * 2.0 #@ () * 2.0 #@ 2.0 >> 2.0 #@ class A(object): pass class B(object): pass A() + B() #@ class A1(object): def __add__(self, other): return NotImplemented A1() + A1() #@ class A(object): def __add__(self, other): return NotImplemented class B(object): def __radd__(self, other): return NotImplemented A() + B() #@ class Parent(object): pass class Child(Parent): def __add__(self, other): return NotImplemented Child() + Parent() #@ class A(object): def __add__(self, other): return NotImplemented class B(A): def __radd__(self, other): return NotImplemented A() + B() #@ # Augmented f = 1 f+=A() #@ x = 1 x+=[] #@ ''') msg = "unsupported operand type(s) for {op}: {lhs!r} and {rhs!r}" expected = [ msg.format(op="+", lhs="int", rhs="str"), msg.format(op="-", lhs="int", rhs="list"), msg.format(op="*", lhs="int", rhs="dict"), msg.format(op="/", lhs="int", rhs="module"), msg.format(op="**", lhs="int", rhs="function"), msg.format(op="*", lhs="dict", rhs="dict"), msg.format(op="-", lhs="dict", rhs="dict"), msg.format(op="|", lhs="dict", rhs="dict"), msg.format(op=">>", lhs="dict", rhs="dict"), msg.format(op="+", lhs="list", rhs="tuple"), msg.format(op="+", lhs="tuple", rhs="list"), msg.format(op="*", lhs="list", rhs="float"), msg.format(op="*", lhs="tuple", rhs="float"), msg.format(op=">>", lhs="float", rhs="float"), msg.format(op="+", lhs="A", rhs="B"), msg.format(op="+", lhs="A1", rhs="A1"), msg.format(op="+", lhs="A", rhs="B"), msg.format(op="+", lhs="Child", rhs="Parent"), msg.format(op="+", lhs="A", rhs="B"), msg.format(op="+=", lhs="int", rhs="A"), msg.format(op="+=", lhs="int", rhs="list"), ] for node, expected_value in zip(ast_nodes, expected): errors = node.type_errors() self.assertEqual(len(errors), 1) error = errors[0] self.assertEqual(str(error), expected_value) def test_unary_type_errors(self): ast_nodes = extract_node(''' import collections ~[] #@ ~() #@ ~dict() #@ ~{} #@ ~set() #@ -set() #@ -"" #@ ~"" #@ +"" #@ class A(object): pass ~(lambda: None) #@ ~A #@ ~A() #@ ~collections #@ ~2.0 #@ ''') msg = "bad operand type for unary {op}: {type}" expected = [ msg.format(op="~", type='list'), msg.format(op="~", type='tuple'), msg.format(op="~", type='dict'), msg.format(op="~", type='dict'), msg.format(op="~", type='set'), msg.format(op="-", type='set'), msg.format(op="-", type='str'), msg.format(op="~", type='str'), msg.format(op="+", type='str'), msg.format(op="~", type=''), msg.format(op="~", type='A'), msg.format(op="~", type='A'), msg.format(op="~", type='collections'), msg.format(op="~", type='float'), ] for node, expected_value in zip(ast_nodes, expected): errors = node.type_errors() self.assertEqual(len(errors), 1) error = errors[0] self.assertEqual(str(error), expected_value) def test_unary_empty_type_errors(self): 
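# ---------------------------------------------------------------------------
# A minimal sketch, assuming the public astroid API: BinOp nodes report the
# type errors they would raise at runtime through type_errors(), which is
# what the expected-message table above checks.
import astroid

_binop = astroid.extract_node('1 + "a"  #@')
_errors = _binop.type_errors()
assert len(_errors) == 1
assert str(_errors[0]) == "unsupported operand type(s) for +: 'int' and 'str'"
# ---------------------------------------------------------------------------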
# These aren't supported right now ast_nodes = extract_node(''' ~(2 and []) #@ -(0 or {}) #@ ''') expected = [ "bad operand type for unary ~: list", "bad operand type for unary -: dict", ] for node, expected_value in zip(ast_nodes, expected): errors = node.type_errors() self.assertEqual(len(errors), 1, (expected, node)) self.assertEqual(str(errors[0]), expected_value) def test_unary_type_errors_for_non_instance_objects(self): node = extract_node('~slice(1, 2, 3)') errors = node.type_errors() self.assertEqual(len(errors), 1) self.assertEqual( str(errors[0]), 'bad operand type for unary ~: slice' ) def test_bool_value_recursive(self): pairs = [ ('{}', False), ('{1:2}', True), ('()', False), ('(1, 2)', True), ('[]', False), ('[1,2]', True), ('frozenset()', False), ('frozenset((1, 2))', True), ] for code, expected in pairs: node = extract_node(code) inferred = next(node.infer()) self.assertEqual(inferred.bool_value(), expected) def test_genexpr_bool_value(self): node = extract_node('''(x for x in range(10))''') self.assertTrue(node.bool_value()) def test_name_bool_value(self): node = extract_node(''' x = 42 y = x y ''') self.assertIs(node.bool_value(), util.Uninferable) def test_bool_value(self): # Verify the truth value of nodes. module = parse(''' import collections collections_module = collections def function(): pass class Class(object): def method(self): pass dict_comp = {x:y for (x, y) in ((1, 2), (2, 3))} set_comp = {x for x in range(10)} list_comp = [x for x in range(10)] lambda_func = lambda: None unbound_method = Class.method instance = Class() bound_method = instance.method def generator_func(): yield def true_value(): return True generator = generator_func() bin_op = 1 + 2 bool_op = x and y callfunc = test() good_callfunc = true_value() compare = 2 < 3 const_str_true = 'testconst' const_str_false = '' ''') collections_module = next(module['collections_module'].infer()) self.assertTrue(collections_module.bool_value()) function = module['function'] self.assertTrue(function.bool_value()) klass = module['Class'] self.assertTrue(klass.bool_value()) dict_comp = next(module['dict_comp'].infer()) self.assertEqual(dict_comp, util.Uninferable) set_comp = next(module['set_comp'].infer()) self.assertEqual(set_comp, util.Uninferable) list_comp = next(module['list_comp'].infer()) self.assertEqual(list_comp, util.Uninferable) lambda_func = next(module['lambda_func'].infer()) self.assertTrue(lambda_func) unbound_method = next(module['unbound_method'].infer()) self.assertTrue(unbound_method) bound_method = next(module['bound_method'].infer()) self.assertTrue(bound_method) generator = next(module['generator'].infer()) self.assertTrue(generator) bin_op = module['bin_op'].parent.value self.assertIs(bin_op.bool_value(), util.Uninferable) bool_op = module['bool_op'].parent.value self.assertEqual(bool_op.bool_value(), util.Uninferable) callfunc = module['callfunc'].parent.value self.assertEqual(callfunc.bool_value(), util.Uninferable) good_callfunc = next(module['good_callfunc'].infer()) self.assertTrue(good_callfunc.bool_value()) compare = module['compare'].parent.value self.assertEqual(compare.bool_value(), util.Uninferable) def test_bool_value_instances(self): instances = extract_node(''' class FalseBoolInstance(object): def {bool}(self): return False class TrueBoolInstance(object): def {bool}(self): return True class FalseLenInstance(object): def __len__(self): return 0 class TrueLenInstance(object): def __len__(self): return 14 class AlwaysTrueInstance(object): pass class ErrorInstance(object): def 
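# ---------------------------------------------------------------------------
# A minimal sketch, assuming the public astroid API: inferred nodes expose
# their truth value through bool_value(), matching the literal pairs tested
# above.
import astroid

_truthy = next(astroid.extract_node('[1, 2]').infer())
_falsy = next(astroid.extract_node('()').infer())
assert _truthy.bool_value() and not _falsy.bool_value()
# ---------------------------------------------------------------------------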
__bool__(self): return lala def __len__(self): return lala class NonMethods(object): __bool__ = 1 __len__ = 2 FalseBoolInstance() #@ TrueBoolInstance() #@ FalseLenInstance() #@ TrueLenInstance() #@ AlwaysTrueInstance() #@ ErrorInstance() #@ '''.format(bool=BOOL_SPECIAL_METHOD)) expected = (False, True, False, True, True, util.Uninferable, util.Uninferable) for node, expected_value in zip(instances, expected): inferred = next(node.infer()) self.assertEqual(inferred.bool_value(), expected_value) def test_bool_value_variable(self): instance = extract_node(''' class VariableBoolInstance(object): def __init__(self, value): self.value = value def {bool}(self): return self.value not VariableBoolInstance(True) '''.format(bool=BOOL_SPECIAL_METHOD)) inferred = next(instance.infer()) self.assertIs(inferred.bool_value(), util.Uninferable) def test_infer_coercion_rules_for_floats_complex(self): ast_nodes = extract_node(''' 1 + 1.0 #@ 1 * 1.0 #@ 2 - 1.0 #@ 2 / 2.0 #@ 1 + 1j #@ 2 * 1j #@ 2 - 1j #@ 3 / 1j #@ ''') expected_values = [2.0, 1.0, 1.0, 1.0, 1 + 1j, 2j, 2 - 1j, -3j] for node, expected in zip(ast_nodes, expected_values): inferred = next(node.infer()) self.assertEqual(inferred.value, expected) def test_binop_list_with_elts(self): ast_node = extract_node(''' x = [A] * 1 [1] + x ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.List) self.assertEqual(len(inferred.elts), 2) self.assertIsInstance(inferred.elts[0], nodes.Const) self.assertIsInstance(inferred.elts[1], nodes.Unknown) def test_binop_same_types(self): ast_nodes = extract_node(''' class A(object): def __add__(self, other): return 42 1 + 1 #@ 1 - 1 #@ "a" + "b" #@ A() + A() #@ ''') expected_values = [2, 0, "ab", 42] for node, expected in zip(ast_nodes, expected_values): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, expected) def test_binop_different_types_reflected_only(self): node = extract_node(''' class A(object): pass class B(object): def __radd__(self, other): return other A() + B() #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'A') def test_binop_different_types_unknown_bases(self): node = extract_node(''' from foo import bar class A(bar): pass class B(object): def __radd__(self, other): return other A() + B() #@ ''') inferred = next(node.infer()) self.assertIs(inferred, util.Uninferable) def test_binop_different_types_normal_not_implemented_and_reflected(self): node = extract_node(''' class A(object): def __add__(self, other): return NotImplemented class B(object): def __radd__(self, other): return other A() + B() #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'A') def test_binop_different_types_no_method_implemented(self): node = extract_node(''' class A(object): pass class B(object): pass A() + B() #@ ''') inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) def test_binop_different_types_reflected_and_normal_not_implemented(self): node = extract_node(''' class A(object): def __add__(self, other): return NotImplemented class B(object): def __radd__(self, other): return NotImplemented A() + B() #@ ''') inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) def test_binop_subtype(self): node = extract_node(''' class A(object): pass class B(A): def __add__(self, other): return other B() + A() #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) 
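# ---------------------------------------------------------------------------
# A minimal sketch, assuming the public astroid API: when the left operand
# has no __add__, inference falls back to the right operand's __radd__,
# matching test_binop_different_types_reflected_only above.  Class names are
# illustrative.
import astroid

_radd_node = astroid.extract_node('''
class Left(object):
    pass
class Right(object):
    def __radd__(self, other):
        return other
Left() + Right()  #@
''')
_radd_inferred = next(_radd_node.infer())
assert isinstance(_radd_inferred, astroid.Instance)
assert _radd_inferred.name == 'Left'
# ---------------------------------------------------------------------------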
self.assertEqual(inferred.name, 'A') def test_binop_subtype_implemented_in_parent(self): node = extract_node(''' class A(object): def __add__(self, other): return other class B(A): pass B() + A() #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'A') def test_binop_subtype_not_implemented(self): node = extract_node(''' class A(object): pass class B(A): def __add__(self, other): return NotImplemented B() + A() #@ ''') inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) def test_binop_supertype(self): node = extract_node(''' class A(object): pass class B(A): def __radd__(self, other): return other A() + B() #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'A') def test_binop_supertype_rop_not_implemented(self): node = extract_node(''' class A(object): def __add__(self, other): return other class B(A): def __radd__(self, other): return NotImplemented A() + B() #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'B') def test_binop_supertype_both_not_implemented(self): node = extract_node(''' class A(object): def __add__(self): return NotImplemented class B(A): def __radd__(self, other): return NotImplemented A() + B() #@ ''') inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) def test_binop_inferrence_errors(self): ast_nodes = extract_node(''' from unknown import Unknown class A(object): def __add__(self, other): return NotImplemented class B(object): def __add__(self, other): return Unknown A() + Unknown #@ Unknown + A() #@ B() + A() #@ A() + B() #@ ''') for node in ast_nodes: self.assertEqual(next(node.infer()), util.Uninferable) def test_binop_ambiguity(self): ast_nodes = extract_node(''' class A(object): def __add__(self, other): if isinstance(other, B): return NotImplemented if type(other) is type(self): return 42 return NotImplemented class B(A): pass class C(object): def __radd__(self, other): if isinstance(other, B): return 42 return NotImplemented A() + B() #@ B() + A() #@ A() + C() #@ C() + A() #@ ''') for node in ast_nodes: self.assertEqual(next(node.infer()), util.Uninferable) def test_metaclass__getitem__(self): ast_node = extract_node(''' class Meta(type): def __getitem__(cls, arg): return 24 import six @six.add_metaclass(Meta) class A(object): pass A['Awesome'] #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 24) def test_bin_op_classes(self): ast_node = extract_node(''' class Meta(type): def __or__(self, other): return 24 import six @six.add_metaclass(Meta) class A(object): pass A | A ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 24) def test_bin_op_supertype_more_complicated_example(self): ast_node = extract_node(''' class A(object): def __init__(self): self.foo = 42 def __add__(self, other): return other.bar + self.foo / 2 class B(A): def __init__(self): self.bar = 24 def __radd__(self, other): return NotImplemented A() + B() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(int(inferred.value), 45) def test_aug_op_same_type_not_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return NotImplemented def __add__(self, other): return NotImplemented A() + A() #@ ''') self.assertEqual(next(ast_node.infer()), 
util.Uninferable) def test_aug_op_same_type_aug_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return other f = A() f += A() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'A') def test_aug_op_same_type_aug_not_implemented_normal_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return NotImplemented def __add__(self, other): return 42 f = A() f += A() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) def test_aug_op_subtype_both_not_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return NotImplemented def __add__(self, other): return NotImplemented class B(A): pass b = B() b+=A() #@ ''') self.assertEqual(next(ast_node.infer()), util.Uninferable) def test_aug_op_subtype_aug_op_is_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return 42 class B(A): pass b = B() b+=A() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) def test_aug_op_subtype_normal_op_is_implemented(self): ast_node = extract_node(''' class A(object): def __add__(self, other): return 42 class B(A): pass b = B() b+=A() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) def test_aug_different_types_no_method_implemented(self): ast_node = extract_node(''' class A(object): pass class B(object): pass f = A() f += B() #@ ''') self.assertEqual(next(ast_node.infer()), util.Uninferable) def test_aug_different_types_augop_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return other class B(object): pass f = A() f += B() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'B') def test_aug_different_types_aug_not_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return NotImplemented def __add__(self, other): return other class B(object): pass f = A() f += B() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'B') def test_aug_different_types_aug_not_implemented_rop_fallback(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return NotImplemented def __add__(self, other): return NotImplemented class B(object): def __radd__(self, other): return other f = A() f += B() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'A') def test_augop_supertypes_none_implemented(self): ast_node = extract_node(''' class A(object): pass class B(object): pass a = A() a += B() #@ ''') self.assertEqual(next(ast_node.infer()), util.Uninferable) def test_augop_supertypes_not_implemented_returned_for_all(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return NotImplemented def __add__(self, other): return NotImplemented class B(object): def __add__(self, other): return NotImplemented a = A() a += B() #@ ''') self.assertEqual(next(ast_node.infer()), util.Uninferable) def test_augop_supertypes_augop_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return other class B(A): pass a = A() a += B() #@ ''') inferred = next(ast_node.infer()) 
self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'B') def test_augop_supertypes_reflected_binop_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return NotImplemented class B(A): def __radd__(self, other): return other a = A() a += B() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'A') def test_augop_supertypes_normal_binop_implemented(self): ast_node = extract_node(''' class A(object): def __iadd__(self, other): return NotImplemented def __add__(self, other): return other class B(A): def __radd__(self, other): return NotImplemented a = A() a += B() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'B') @unittest.expectedFailure def test_string_interpolation(self): ast_nodes = extract_node(''' "a%d%d" % (1, 2) #@ "a%(x)s" % {"x": 42} #@ ''') expected = ["a12", "a42"] for node, expected_value in zip(ast_nodes, expected): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, expected_value) def test_mul_list_supports__index__(self): ast_nodes = extract_node(''' class Index(object): def __index__(self): return 2 class NotIndex(object): pass class NotIndex2(object): def __index__(self): return None a = [1, 2] a * Index() #@ a * NotIndex() #@ a * NotIndex2() #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, nodes.List) self.assertEqual([node.value for node in first.itered()], [1, 2, 1, 2]) for rest in ast_nodes[1:]: inferred = next(rest.infer()) self.assertEqual(inferred, util.Uninferable) def test_subscript_supports__index__(self): ast_nodes = extract_node(''' class Index(object): def __index__(self): return 2 class LambdaIndex(object): __index__ = lambda self: self.foo @property def foo(self): return 1 class NonIndex(object): __index__ = lambda self: None a = [1, 2, 3, 4] a[Index()] #@ a[LambdaIndex()] #@ a[NonIndex()] #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, nodes.Const) self.assertEqual(first.value, 3) second = next(ast_nodes[1].infer()) self.assertIsInstance(second, nodes.Const) self.assertEqual(second.value, 2) self.assertRaises(InferenceError, next, ast_nodes[2].infer()) def test_special_method_masquerading_as_another(self): ast_node = extract_node(''' class Info(object): def __add__(self, other): return "lala" __or__ = __add__ f = Info() f | Info() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, "lala") def test_unary_op_assignment(self): ast_node = extract_node(''' class A(object): pass def pos(self): return 42 A.__pos__ = pos f = A() +f #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) def test_unary_op_classes(self): ast_node = extract_node(''' import six class Meta(type): def __invert__(self): return 42 @six.add_metaclass(Meta) class A(object): pass ~A ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) def _slicing_test_helper(self, pairs, cls, get_elts): for code, expected in pairs: ast_node = extract_node(code) inferred = next(ast_node.infer()) self.assertIsInstance(inferred, cls) self.assertEqual(get_elts(inferred), expected, ast_node.as_string()) def test_slicing_list(self): pairs = ( ("[1, 2, 3][:] #@", [1, 2, 3]), ("[1, 2, 3][0:] #@", [1, 2, 3]), ("[1, 2, 
3][None:] #@", [1, 2, 3]), ("[1, 2, 3][None:None] #@", [1, 2, 3]), ("[1, 2, 3][0:-1] #@", [1, 2]), ("[1, 2, 3][0:2] #@", [1, 2]), ("[1, 2, 3][0:2:None] #@", [1, 2]), ("[1, 2, 3][::] #@", [1, 2, 3]), ("[1, 2, 3][::2] #@", [1, 3]), ("[1, 2, 3][::-1] #@", [3, 2, 1]), ("[1, 2, 3][0:2:2] #@", [1]), ("[1, 2, 3, 4, 5, 6][0:4-1:2+0] #@", [1, 3]), ) self._slicing_test_helper( pairs, nodes.List, lambda inferred: [elt.value for elt in inferred.elts]) def test_slicing_tuple(self): pairs = ( ("(1, 2, 3)[:] #@", [1, 2, 3]), ("(1, 2, 3)[0:] #@", [1, 2, 3]), ("(1, 2, 3)[None:] #@", [1, 2, 3]), ("(1, 2, 3)[None:None] #@", [1, 2, 3]), ("(1, 2, 3)[0:-1] #@", [1, 2]), ("(1, 2, 3)[0:2] #@", [1, 2]), ("(1, 2, 3)[0:2:None] #@", [1, 2]), ("(1, 2, 3)[::] #@", [1, 2, 3]), ("(1, 2, 3)[::2] #@", [1, 3]), ("(1, 2, 3)[::-1] #@", [3, 2, 1]), ("(1, 2, 3)[0:2:2] #@", [1]), ("(1, 2, 3, 4, 5, 6)[0:4-1:2+0] #@", [1, 3]), ) self._slicing_test_helper( pairs, nodes.Tuple, lambda inferred: [elt.value for elt in inferred.elts]) def test_slicing_str(self): pairs = ( ("'123'[:] #@", "123"), ("'123'[0:] #@", "123"), ("'123'[None:] #@", "123"), ("'123'[None:None] #@", "123"), ("'123'[0:-1] #@", "12"), ("'123'[0:2] #@", "12"), ("'123'[0:2:None] #@", "12"), ("'123'[::] #@", "123"), ("'123'[::2] #@", "13"), ("'123'[::-1] #@", "321"), ("'123'[0:2:2] #@", "1"), ("'123456'[0:4-1:2+0] #@", "13"), ) self._slicing_test_helper( pairs, nodes.Const, lambda inferred: inferred.value) def test_invalid_slicing_primaries(self): examples = [ "(lambda x: x)[1:2]", "1[2]", "(1, 2, 3)[a:]", "(1, 2, 3)[object:object]", "(1, 2, 3)[1:object]", 'enumerate[2]' ] for code in examples: node = extract_node(code) self.assertRaises(InferenceError, next, node.infer()) def test_instance_slicing(self): ast_nodes = extract_node(''' class A(object): def __getitem__(self, index): return [1, 2, 3, 4, 5][index] A()[1:] #@ A()[:2] #@ A()[1:4] #@ ''') expected_values = [ [2, 3, 4, 5], [1, 2], [2, 3, 4], ] for expected, node in zip(expected_values, ast_nodes): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.List) self.assertEqual([elt.value for elt in inferred.elts], expected) def test_instance_slicing_slices(self): ast_node = extract_node(''' class A(object): def __getitem__(self, index): return index A()[1:] #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Slice) self.assertEqual(inferred.lower.value, 1) self.assertIsNone(inferred.upper) def test_instance_slicing_fails(self): ast_nodes = extract_node(''' class A(object): def __getitem__(self, index): return 1[index] A()[4:5] #@ A()[2:] #@ ''') for node in ast_nodes: self.assertEqual(next(node.infer()), util.Uninferable) def test_type__new__with_metaclass(self): ast_node = extract_node(''' class Metaclass(type): pass class Entity(object): pass type.__new__(Metaclass, 'NewClass', (Entity,), {'a': 1}) #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.name, 'NewClass') metaclass = inferred.metaclass() self.assertEqual(metaclass, inferred.root()['Metaclass']) ancestors = list(inferred.ancestors()) self.assertEqual(len(ancestors), 2) self.assertEqual(ancestors[0], inferred.root()['Entity']) attributes = inferred.getattr('a') self.assertEqual(len(attributes), 1) self.assertIsInstance(attributes[0], nodes.Const) self.assertEqual(attributes[0].value, 1) def test_type__new__not_enough_arguments(self): ast_nodes = extract_node(''' type.__new__(type, 'foo') #@ type.__new__(type, 'foo', ()) #@ type.__new__(type, 'foo', 
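# ---------------------------------------------------------------------------
# A minimal sketch, assuming the public astroid API: constant slices are
# evaluated during inference, as in the slicing pairs above.
import astroid

_slice_node = astroid.extract_node('[1, 2, 3][::-1]  #@')
_slice_inferred = next(_slice_node.infer())
assert isinstance(_slice_inferred, astroid.nodes.List)
assert [elt.value for elt in _slice_inferred.elts] == [3, 2, 1]
# ---------------------------------------------------------------------------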
(), {}, ()) #@ ''') for node in ast_nodes: with pytest.raises(InferenceError): next(node.infer()) def test_type__new__invalid_mcs_argument(self): ast_nodes = extract_node(''' class Class(object): pass type.__new__(1, 2, 3, 4) #@ type.__new__(Class, 2, 3, 4) #@ ''') for node in ast_nodes: with pytest.raises(InferenceError): next(node.infer()) def test_type__new__invalid_name(self): ast_nodes = extract_node(''' class Class(type): pass type.__new__(Class, object, 1, 2) #@ type.__new__(Class, 1, 1, 2) #@ type.__new__(Class, [], 1, 2) #@ ''') for node in ast_nodes: with pytest.raises(InferenceError): next(node.infer()) def test_type__new__invalid_bases(self): ast_nodes = extract_node(''' type.__new__(type, 'a', 1, 2) #@ type.__new__(type, 'a', [], 2) #@ type.__new__(type, 'a', {}, 2) #@ type.__new__(type, 'a', (1, ), 2) #@ type.__new__(type, 'a', (object, 1), 2) #@ ''') for node in ast_nodes: with pytest.raises(InferenceError): next(node.infer()) def test_type__new__invalid_attrs(self): type_error_nodes = extract_node(''' type.__new__(type, 'a', (), ()) #@ type.__new__(type, 'a', (), object) #@ type.__new__(type, 'a', (), 1) #@ ''') for node in type_error_nodes: with pytest.raises(InferenceError): next(node.infer()) # Ignore invalid keys ast_nodes = extract_node(''' type.__new__(type, 'a', (), {object: 1}) #@ type.__new__(type, 'a', (), {1:2, "a":5}) #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) def test_type__new__metaclass_lookup(self): ast_node = extract_node(''' class Metaclass(type): def test(cls): pass @classmethod def test1(cls): pass attr = 42 type.__new__(Metaclass, 'A', (), {}) #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) test = inferred.getattr('test') self.assertEqual(len(test), 1) self.assertIsInstance(test[0], BoundMethod) self.assertIsInstance(test[0].bound, nodes.ClassDef) self.assertEqual(test[0].bound, inferred) test1 = inferred.getattr('test1') self.assertEqual(len(test1), 1) self.assertIsInstance(test1[0], BoundMethod) self.assertIsInstance(test1[0].bound, nodes.ClassDef) self.assertEqual(test1[0].bound, inferred.metaclass()) attr = inferred.getattr('attr') self.assertEqual(len(attr), 1) self.assertIsInstance(attr[0], nodes.Const) self.assertEqual(attr[0].value, 42) def test_type__new__metaclass_and_ancestors_lookup(self): ast_node = extract_node(''' class Book(object): title = 'Ubik' class MetaBook(type): title = 'Grimus' type.__new__(MetaBook, 'book', (Book, ), {'title':'Catch 22'}) #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) titles = [title.value for attr in inferred.getattr('title') for title in attr.inferred()] self.assertEqual(titles, ['Catch 22', 'Ubik', 'Grimus']) @unittest.expectedFailure def test_function_metaclasses(self): # These are not supported right now, although # they will be in the future. 
ast_node = extract_node(''' import six class BookMeta(type): author = 'Rushdie' def metaclass_function(*args): return BookMeta @six.add_metaclass(metaclass_function) class Book(object): pass Book #@ ''') inferred = next(ast_node.infer()) metaclass = inferred.metaclass() self.assertIsInstance(metaclass, nodes.ClassDef) self.assertEqual(metaclass.name, 'BookMeta') author = next(inferred.igetattr('author')) self.assertIsInstance(author, nodes.Const) self.assertEqual(author.value, 'Rushdie') def test_subscript_inference_error(self): # Used to raise StopIteration ast_node = extract_node(''' class AttributeDict(dict): def __getitem__(self, name): return self flow = AttributeDict() flow['app'] = AttributeDict() flow['app']['config'] = AttributeDict() flow['app']['config']['doffing'] = AttributeDict() #@ ''') self.assertIsNone(helpers.safe_infer(ast_node.targets[0])) def test_classmethod_inferred_by_context(self): ast_node = extract_node(''' class Super(object): def instance(cls): return cls() instance = classmethod(instance) class Sub(Super): def method(self): return self # should see the Sub.instance() is returning a Sub # instance, not a Super instance Sub.instance().method() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'Sub') def test_infer_call_result_invalid_dunder_call_on_instance(self): ast_nodes = extract_node(''' class A: __call__ = 42 class B: __call__ = A() class C: __call = None A() #@ B() #@ C() #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertRaises(InferenceError, next, inferred.infer_call_result(node)) def test_context_call_for_context_managers(self): ast_nodes = extract_node(''' class A: def __enter__(self): return self class B: __enter__ = lambda self: self class C: @property def a(self): return A() def __enter__(self): return self.a with A() as a: a #@ with B() as b: b #@ with C() as c: c #@ ''') first_a = next(ast_nodes[0].infer()) self.assertIsInstance(first_a, Instance) self.assertEqual(first_a.name, 'A') second_b = next(ast_nodes[1].infer()) self.assertIsInstance(second_b, Instance) self.assertEqual(second_b.name, 'B') third_c = next(ast_nodes[2].infer()) self.assertIsInstance(third_c, Instance) self.assertEqual(third_c.name, 'A') def test_metaclass_subclasses_arguments_are_classes_not_instances(self): ast_node = extract_node(''' class A(type): def test(cls): return cls import six @six.add_metaclass(A) class B(object): pass B.test() #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.name, 'B') def test_infer_cls_in_class_methods(self): ast_nodes = extract_node(''' class A(type): def __call__(cls): cls #@ class B(object): def __call__(cls): cls #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, nodes.ClassDef) second = next(ast_nodes[1].infer()) self.assertIsInstance(second, Instance) @unittest.expectedFailure def test_metaclass_arguments_are_classes_not_instances(self): ast_node = extract_node(''' class A(type): def test(cls): return cls A.test() #@ ''') # This is not supported yet inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.name, 'A') @test_utils.require_version(minver='3.0') def test_metaclass_with_keyword_args(self): ast_node = extract_node(''' class TestMetaKlass(type): def __new__(mcs, name, bases, ns, kwo_arg): return super().__new__(mcs, name, bases, ns) class TestKlass(metaclass=TestMetaKlass, kwo_arg=42): #@ pass ''') 
inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) def test_metaclass_custom_dunder_call(self): """The Metaclass __call__ should take precedence over the default metaclass type call (initialization) See https://github.com/PyCQA/pylint/issues/2159 """ val = extract_node(""" class _Meta(type): def __call__(cls): return 1 class Clazz(metaclass=_Meta): def __call__(self): return 5.5 Clazz() #@ """).inferred()[0].value assert val == 1 def test_metaclass_custom_dunder_call_boundnode(self): """The boundnode should be the calling class""" cls = extract_node(""" class _Meta(type): def __call__(cls): return cls class Clazz(metaclass=_Meta): pass Clazz() #@ """).inferred()[0] assert isinstance(cls, nodes.ClassDef) and cls.name == "Clazz" def test_delayed_attributes_without_slots(self): ast_node = extract_node(''' class A(object): __slots__ = ('a', ) a = A() a.teta = 24 a.a = 24 a #@ ''') inferred = next(ast_node.infer()) with self.assertRaises(exceptions.NotFoundError): inferred.getattr('teta') inferred.getattr('a') @test_utils.require_version(maxver='3.0') def test_delayed_attributes_with_old_style_classes(self): ast_node = extract_node(''' class A: __slots__ = ('a', ) a = A() a.teta = 42 a #@ ''') next(ast_node.infer()).getattr('teta') def test_lambda_as_methods(self): ast_node = extract_node(''' class X: m = lambda self, arg: self.z + arg z = 24 X().m(4) #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 28) def test_inner_value_redefined_by_subclass(self): ast_node = extract_node(''' class X(object): M = lambda self, arg: "a" x = 24 def __init__(self): x = 24 self.m = self.M(x) class Y(X): M = lambda self, arg: arg + 1 def blurb(self): self.m #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 25) @unittest.expectedFailure def test_inner_value_redefined_by_subclass_with_mro(self): # This might work, but it currently doesn't due to not being able # to reuse inference contexts. 
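        # Concretely: inferring self.m on a Y instance means re-evaluating the
        # assignment made in X.__init__ with self bound to Y, so that y.M picks up
        # Y's override and y.z is found on the C base through Y's MRO.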
ast_node = extract_node(''' class X(object): M = lambda self, arg: arg + 1 x = 24 def __init__(self): y = self self.m = y.M(1) + y.z class C(object): z = 24 class Y(X, C): M = lambda self, arg: arg + 1 def blurb(self): self.m #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 25) def test_getitem_of_class_raised_type_error(self): # Test that we wrap an AttributeInferenceError # and reraise it as a TypeError in Class.getitem node = extract_node(''' def test(): yield test() ''') inferred = next(node.infer()) with self.assertRaises(exceptions.AstroidTypeError): inferred.getitem(nodes.Const('4')) class GetattrTest(unittest.TestCase): def test_yes_when_unknown(self): ast_nodes = extract_node(''' from missing import Missing getattr(1, Unknown) #@ getattr(Unknown, 'a') #@ getattr(Unknown, Unknown) #@ getattr(Unknown, Unknown, Unknown) #@ getattr(Missing, 'a') #@ getattr(Missing, Missing) #@ getattr('a', Missing) #@ getattr('a', Missing, Missing) #@ ''') for node in ast_nodes[:4]: self.assertRaises(InferenceError, next, node.infer()) for node in ast_nodes[4:]: inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable, node) def test_attrname_not_string(self): ast_nodes = extract_node(''' getattr(1, 1) #@ c = int getattr(1, c) #@ ''') for node in ast_nodes: self.assertRaises(InferenceError, next, node.infer()) def test_attribute_missing(self): ast_nodes = extract_node(''' getattr(1, 'ala') #@ getattr(int, 'ala') #@ getattr(float, 'bala') #@ getattr({}, 'portocala') #@ ''') for node in ast_nodes: self.assertRaises(InferenceError, next, node.infer()) def test_default(self): ast_nodes = extract_node(''' getattr(1, 'ala', None) #@ getattr(int, 'bala', int) #@ getattr(int, 'bala', getattr(int, 'portocala', None)) #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, nodes.Const) self.assertIsNone(first.value) second = next(ast_nodes[1].infer()) self.assertIsInstance(second, nodes.ClassDef) self.assertEqual(second.qname(), "%s.int" % BUILTINS) third = next(ast_nodes[2].infer()) self.assertIsInstance(third, nodes.Const) self.assertIsNone(third.value) def test_lookup(self): ast_nodes = extract_node(''' class A(object): def test(self): pass class B(A): def test_b(self): pass class C(A): pass class E(C, B): def test_e(self): pass getattr(A(), 'test') #@ getattr(A, 'test') #@ getattr(E(), 'test_b') #@ getattr(E(), 'test') #@ class X(object): def test(self): getattr(self, 'test') #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, BoundMethod) self.assertEqual(first.bound.name, 'A') second = next(ast_nodes[1].infer()) self.assertIsInstance(second, UnboundMethod) self.assertIsInstance(second.parent, nodes.ClassDef) self.assertEqual(second.parent.name, 'A') third = next(ast_nodes[2].infer()) self.assertIsInstance(third, BoundMethod) # Bound to E, but the provider is B. 
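        # That is, the bound method reports E as its binding class, while the
        # double _proxied hop below unwraps the proxy down to the function that
        # actually defines test_b, whose parent scope is B.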
self.assertEqual(third.bound.name, 'E') self.assertEqual(third._proxied._proxied.parent.name, 'B') fourth = next(ast_nodes[3].infer()) self.assertIsInstance(fourth, BoundMethod) self.assertEqual(fourth.bound.name, 'E') self.assertEqual(third._proxied._proxied.parent.name, 'B') fifth = next(ast_nodes[4].infer()) self.assertIsInstance(fifth, BoundMethod) self.assertEqual(fifth.bound.name, 'X') def test_lambda(self): node = extract_node(''' getattr(lambda x: x, 'f') #@ ''') inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) class HasattrTest(unittest.TestCase): def test_inference_errors(self): ast_nodes = extract_node(''' from missing import Missing hasattr(Unknown, 'ala') #@ hasattr(Missing, 'bala') #@ hasattr('portocala', Missing) #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) def test_attribute_is_missing(self): ast_nodes = extract_node(''' class A: pass hasattr(int, 'ala') #@ hasattr({}, 'bala') #@ hasattr(A(), 'portocala') #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertFalse(inferred.value) def test_attribute_is_not_missing(self): ast_nodes = extract_node(''' class A(object): def test(self): pass class B(A): def test_b(self): pass class C(A): pass class E(C, B): def test_e(self): pass hasattr(A(), 'test') #@ hasattr(A, 'test') #@ hasattr(E(), 'test_b') #@ hasattr(E(), 'test') #@ class X(object): def test(self): hasattr(self, 'test') #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertTrue(inferred.value) def test_lambda(self): node = extract_node(''' hasattr(lambda x: x, 'f') #@ ''') inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) class BoolOpTest(unittest.TestCase): def test_bool_ops(self): expected = [ ('1 and 2', 2), ('0 and 2', 0), ('1 or 2', 1), ('0 or 2', 2), ('0 or 0 or 1', 1), ('1 and 2 and 3', 3), ('1 and 2 or 3', 2), ('1 and 0 or 3', 3), ('1 or 0 and 2', 1), ('(1 and 2) and (2 and 3)', 3), ('not 2 and 3', False), ('2 and not 3', False), ('not 0 and 3', 3), ('True and False', False), ('not (True or False) and True', False), ] for code, expected_value in expected: node = extract_node(code) inferred = next(node.infer()) self.assertEqual(inferred.value, expected_value) def test_yes_when_unknown(self): ast_nodes = extract_node(''' from unknown import unknown, any, not_any 0 and unknown #@ unknown or 0 #@ any or not_any and unknown #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) def test_other_nodes(self): ast_nodes = extract_node(''' def test(): pass test and 0 #@ 1 and test #@ ''') first = next(ast_nodes[0].infer()) self.assertEqual(first.value, 0) second = next(ast_nodes[1].infer()) self.assertIsInstance(second, nodes.FunctionDef) self.assertEqual(second.name, 'test') class TestCallable(unittest.TestCase): def test_callable(self): expected = [ ('callable(len)', True), ('callable("a")', False), ('callable(callable)', True), ('callable(lambda x, y: x+y)', True), ('import os; __(callable(os))', False), ('callable(int)', True), (''' def test(): pass callable(test) #@''', True), (''' class C1: def meth(self): pass callable(C1) #@''', True), ] for code, expected_value in expected: node = extract_node(code) inferred = next(node.infer()) self.assertEqual(inferred.value, expected_value) def test_callable_methods(self): ast_nodes = extract_node(''' class C: def test(self): pass @staticmethod 
def static(): pass @classmethod def class_method(cls): pass def __call__(self): pass class D(C): pass class NotReallyCallableDueToPythonMisfeature(object): __call__ = 42 callable(C.test) #@ callable(C.static) #@ callable(C.class_method) #@ callable(C().test) #@ callable(C().static) #@ callable(C().class_method) #@ C #@ C() #@ NotReallyCallableDueToPythonMisfeature() #@ staticmethod #@ classmethod #@ property #@ D #@ D() #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertTrue(inferred) def test_inference_errors(self): ast_nodes = extract_node(''' from unknown import unknown callable(unknown) #@ def test(): return unknown callable(test()) #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) def test_not_callable(self): ast_nodes = extract_node(''' callable("") #@ callable(1) #@ callable(True) #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertFalse(inferred.value) class TestBool(unittest.TestCase): def test_bool(self): pairs = [ ('bool()', False), ('bool(1)', True), ('bool(0)', False), ('bool([])', False), ('bool([1])', True), ('bool({})', False), ('bool(True)', True), ('bool(False)', False), ('bool(None)', False), ('from unknown import Unknown; __(bool(Unknown))', util.Uninferable), ] for code, expected in pairs: node = extract_node(code) inferred = next(node.infer()) if expected is util.Uninferable: self.assertEqual(expected, inferred) else: self.assertEqual(inferred.value, expected) def test_bool_bool_special_method(self): ast_nodes = extract_node(''' class FalseClass: def {method}(self): return False class TrueClass: def {method}(self): return True class C(object): def __call__(self): return False class B(object): {method} = C() class LambdaBoolFalse(object): {method} = lambda self: self.foo @property def foo(self): return 0 class FalseBoolLen(object): __len__ = lambda self: self.foo @property def foo(self): return 0 bool(FalseClass) #@ bool(TrueClass) #@ bool(FalseClass()) #@ bool(TrueClass()) #@ bool(B()) #@ bool(LambdaBoolFalse()) #@ bool(FalseBoolLen()) #@ '''.format(method=BOOL_SPECIAL_METHOD)) expected = [True, True, False, True, False, False, False] for node, expected_value in zip(ast_nodes, expected): inferred = next(node.infer()) self.assertEqual(inferred.value, expected_value) def test_bool_instance_not_callable(self): ast_nodes = extract_node(''' class BoolInvalid(object): {method} = 42 class LenInvalid(object): __len__ = "a" bool(BoolInvalid()) #@ bool(LenInvalid()) #@ '''.format(method=BOOL_SPECIAL_METHOD)) for node in ast_nodes: inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) class TestType(unittest.TestCase): def test_type(self): pairs = [ ('type(1)', 'int'), ('type(type)', 'type'), ('type(None)', 'NoneType'), ('type(object)', 'type'), ('type(dict())', 'dict'), ('type({})', 'dict'), ('type(frozenset())', 'frozenset'), ] for code, expected in pairs: node = extract_node(code) inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.name, expected) class ArgumentsTest(unittest.TestCase): @staticmethod def _get_dict_value(inferred): items = inferred.items return sorted((key.value, value.value) for key, value in items) @staticmethod def _get_tuple_value(inferred): elts = inferred.elts return tuple(elt.value for elt in elts) def test_args(self): expected_values = [(), (1, ), (2, 3), (4, 5), (3, ), (), (3, 4, 5), (), (), (4, ), (4, 5), (), (3, ), (), (), (3, ), (42, )] ast_nodes = extract_node(''' def func(*args): 
return args func() #@ func(1) #@ func(2, 3) #@ func(*(4, 5)) #@ def func(a, b, *args): return args func(1, 2, 3) #@ func(1, 2) #@ func(1, 2, 3, 4, 5) #@ def func(a, b, c=42, *args): return args func(1, 2) #@ func(1, 2, 3) #@ func(1, 2, 3, 4) #@ func(1, 2, 3, 4, 5) #@ func = lambda a, b, *args: args func(1, 2) #@ func(1, 2, 3) #@ func = lambda a, b=42, *args: args func(1) #@ func(1, 2) #@ func(1, 2, 3) #@ func(1, 2, *(42, )) #@ ''') for node, expected_value in zip(ast_nodes, expected_values): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Tuple) self.assertEqual(self._get_tuple_value(inferred), expected_value) @test_utils.require_version('3.5') def test_multiple_starred_args(self): expected_values = [ (1, 2, 3), (1, 4, 2, 3, 5, 6, 7), ] ast_nodes = extract_node(''' def func(a, b, *args): return args func(1, 2, *(1, ), *(2, 3)) #@ func(1, 2, *(1, ), 4, *(2, 3), 5, *(6, 7)) #@ ''') for node, expected_value in zip(ast_nodes, expected_values): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Tuple) self.assertEqual(self._get_tuple_value(inferred), expected_value) def test_defaults(self): expected_values = [42, 3, 41, 42] ast_nodes = extract_node(''' def func(a, b, c=42, *args): return c func(1, 2) #@ func(1, 2, 3) #@ func(1, 2, c=41) #@ func(1, 2, 42, 41) #@ ''') for node, expected_value in zip(ast_nodes, expected_values): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, expected_value) @test_utils.require_version('3.0') def test_kwonly_args(self): expected_values = [24, 24, 42, 23, 24, 24, 54] ast_nodes = extract_node(''' def test(*, f, b): return f test(f=24, b=33) #@ def test(a, *, f): return f test(1, f=24) #@ def test(a, *, f=42): return f test(1) #@ test(1, f=23) #@ def test(a, b, c=42, *args, f=24): return f test(1, 2, 3) #@ test(1, 2, 3, 4) #@ test(1, 2, 3, 4, 5, f=54) #@ ''') for node, expected_value in zip(ast_nodes, expected_values): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, expected_value) def test_kwargs(self): expected = [ [('a', 1), ('b', 2), ('c', 3)], [('a', 1)], [('a', 'b')], ] ast_nodes = extract_node(''' def test(**kwargs): return kwargs test(a=1, b=2, c=3) #@ test(a=1) #@ test(**{'a': 'b'}) #@ ''') for node, expected_value in zip(ast_nodes, expected): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Dict) value = self._get_dict_value(inferred) self.assertEqual(value, expected_value) def test_kwargs_and_other_named_parameters(self): ast_nodes = extract_node(''' def test(a=42, b=24, **kwargs): return kwargs test(42, 24, c=3, d=4) #@ test(49, b=24, d=4) #@ test(a=42, b=33, c=3, d=42) #@ test(a=42, **{'c':42}) #@ ''') expected_values = [ [('c', 3), ('d', 4)], [('d', 4)], [('c', 3), ('d', 42)], [('c', 42)], ] for node, expected_value in zip(ast_nodes, expected_values): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Dict) value = self._get_dict_value(inferred) self.assertEqual(value, expected_value) def test_kwargs_access_by_name(self): expected_values = [42, 42, 42, 24] ast_nodes = extract_node(''' def test(**kwargs): return kwargs['f'] test(f=42) #@ test(**{'f': 42}) #@ test(**dict(f=42)) #@ def test(f=42, **kwargs): return kwargs['l'] test(l=24) #@ ''') for ast_node, value in zip(ast_nodes, expected_values): inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Const, inferred) self.assertEqual(inferred.value, value) @test_utils.require_version('3.5') def 
test_multiple_kwargs(self): expected_value = [ ('a', 1), ('b', 2), ('c', 3), ('d', 4), ('f', 42), ] ast_node = extract_node(''' def test(**kwargs): return kwargs test(a=1, b=2, **{'c': 3}, **{'d': 4}, f=42) #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.Dict) value = self._get_dict_value(inferred) self.assertEqual(value, expected_value) def test_kwargs_are_overridden(self): ast_nodes = extract_node(''' def test(f): return f test(f=23, **{'f': 34}) #@ def test(f=None): return f test(f=23, **{'f':23}) #@ ''') for ast_node in ast_nodes: inferred = next(ast_node.infer()) self.assertEqual(inferred, util.Uninferable) def test_fail_to_infer_args(self): ast_nodes = extract_node(''' def test(a, **kwargs): return a test(*missing) #@ test(*object) #@ test(*1) #@ def test(**kwargs): return kwargs test(**miss) #@ test(**(1, 2)) #@ test(**1) #@ test(**{misss:1}) #@ test(**{object:1}) #@ test(**{1:1}) #@ test(**{'a':1, 'a':1}) #@ def test(a): return a test() #@ test(1, 2, 3) #@ from unknown import unknown test(*unknown) #@ def test(*args): return args test(*unknown) #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertEqual(inferred, util.Uninferable) class SliceTest(unittest.TestCase): def test_slice(self): ast_nodes = [ ('[1, 2, 3][slice(None)]', [1, 2, 3]), ('[1, 2, 3][slice(None, None)]', [1, 2, 3]), ('[1, 2, 3][slice(None, None, None)]', [1, 2, 3]), ('[1, 2, 3][slice(1, None)]', [2, 3]), ('[1, 2, 3][slice(None, 1, None)]', [1]), ('[1, 2, 3][slice(0, 1)]', [1]), ('[1, 2, 3][slice(0, 3, 2)]', [1, 3]), ] for node, expected_value in ast_nodes: ast_node = extract_node("__({})".format(node)) inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.List) self.assertEqual([elt.value for elt in inferred.elts], expected_value) def test_slice_inference_error(self): ast_nodes = extract_node(''' from unknown import unknown [1, 2, 3][slice(None, unknown, unknown)] #@ [1, 2, 3][slice(None, missing, missing)] #@ [1, 2, 3][slice(object, list, tuple)] #@ [1, 2, 3][slice(b'a')] #@ [1, 2, 3][slice(1, 'aa')] #@ [1, 2, 3][slice(1, 2.0, 3.0)] #@ [1, 2, 3][slice()] #@ [1, 2, 3][slice(1, 2, 3, 4)] #@ ''') for node in ast_nodes: self.assertRaises(InferenceError, next, node.infer()) def test_slice_attributes(self): ast_nodes = [ ('slice(2, 3, 4)', (2, 3, 4)), ('slice(None, None, 4)', (None, None, 4)), ('slice(None, 1, None)', (None, 1, None)), ] for code, values in ast_nodes: lower, upper, step = values node = extract_node(code) inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Slice) lower_value = next(inferred.igetattr('start')) self.assertIsInstance(lower_value, nodes.Const) self.assertEqual(lower_value.value, lower) higher_value = next(inferred.igetattr('stop')) self.assertIsInstance(higher_value, nodes.Const) self.assertEqual(higher_value.value, upper) step_value = next(inferred.igetattr('step')) self.assertIsInstance(step_value, nodes.Const) self.assertEqual(step_value.value, step) self.assertEqual(inferred.pytype(), '%s.slice' % BUILTINS) def test_slice_type(self): ast_node = extract_node('type(slice(None, None, None))') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.name, 'slice') class CallSiteTest(unittest.TestCase): @staticmethod def _call_site_from_call(call): return arguments.CallSite.from_call(call) def _test_call_site_pair(self, code, expected_args, expected_keywords): ast_node = extract_node(code) call_site = self._call_site_from_call(ast_node) 
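        # CallSite flattens inferable *starred and **double-starred arguments at
        # the call site; e.g. for an illustrative call f(1, *(2, 3), b=4),
        # positional_arguments holds the nodes for 1, 2, 3 and keyword_arguments
        # maps 'b' to the node for 4, which is why the checks below compare .value.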
self.assertEqual(len(call_site.positional_arguments), len(expected_args)) self.assertEqual([arg.value for arg in call_site.positional_arguments], expected_args) self.assertEqual(len(call_site.keyword_arguments), len(expected_keywords)) for keyword, value in expected_keywords.items(): self.assertIn(keyword, call_site.keyword_arguments) self.assertEqual(call_site.keyword_arguments[keyword].value, value) def _test_call_site(self, pairs): for pair in pairs: self._test_call_site_pair(*pair) @test_utils.require_version('3.5') def test_call_site_starred_args(self): pairs = [ ( "f(*(1, 2), *(2, 3), *(3, 4), **{'a':1}, **{'b': 2})", [1, 2, 2, 3, 3, 4], {'a': 1, 'b': 2} ), ( "f(1, 2, *(3, 4), 5, *(6, 7), f=24, **{'c':3})", [1, 2, 3, 4, 5, 6, 7], {'f':24, 'c': 3}, ), # Too many fs passed into. ( "f(f=24, **{'f':24})", [], {}, ), ] self._test_call_site(pairs) def test_call_site(self): pairs = [ ( "f(1, 2)", [1, 2], {} ), ( "f(1, 2, *(1, 2))", [1, 2, 1, 2], {} ), ( "f(a=1, b=2, c=3)", [], {'a':1, 'b':2, 'c':3} ) ] self._test_call_site(pairs) def _test_call_site_valid_arguments(self, values, invalid): for value in values: ast_node = extract_node(value) call_site = self._call_site_from_call(ast_node) self.assertEqual(call_site.has_invalid_arguments(), invalid) def test_call_site_valid_arguments(self): values = [ "f(*lala)", "f(*1)", "f(*object)", ] self._test_call_site_valid_arguments(values, invalid=True) values = [ "f()", "f(*(1, ))", "f(1, 2, *(2, 3))", ] self._test_call_site_valid_arguments(values, invalid=False) def test_duplicated_keyword_arguments(self): ast_node = extract_node('f(f=24, **{"f": 25})') site = self._call_site_from_call(ast_node) self.assertIn('f', site.duplicated_keywords) class ObjectDunderNewTest(unittest.TestCase): def test_object_dunder_new_is_inferred_if_decorator(self): node = extract_node(''' @object.__new__ class instance(object): pass ''') inferred = next(node.infer()) self.assertIsInstance(inferred, Instance) def test_augassign_recursion(): """Make sure inference doesn't throw a RecursionError Regression test for augmented assign dropping context.path causing recursion errors """ # infinitely recurses in python code = """ def rec(): a = 0 a += rec() return a rec() """ cls_node = extract_node(code) assert next(cls_node.infer()) is util.Uninferable def test_infer_custom_inherit_from_property(): node = extract_node(''' class custom_property(property): pass class MyClass(object): @custom_property def my_prop(self): return 1 MyClass().my_prop ''') inferred = next(node.infer()) assert isinstance(inferred, nodes.Const) assert inferred.value == 1 def test_cannot_infer_call_result_for_builtin_methods(): node = extract_node(""" a = "fast" a """) inferred = next(node.infer()) lenmeth = next(inferred.igetattr("__len__")) with pytest.raises(InferenceError): next(lenmeth.infer_call_result(None, None)) def test_unpack_dicts_in_assignment(): ast_nodes = extract_node(''' a, b = {1:2, 2:3} a #@ b #@ ''') first_inferred = next(ast_nodes[0].infer()) second_inferred = next(ast_nodes[1].infer()) assert isinstance(first_inferred, nodes.Const) assert first_inferred.value == 1 assert isinstance(second_inferred, nodes.Const) assert second_inferred.value == 2 def test_slice_inference_in_for_loops(): node = extract_node(''' for a, (c, *b) in [(1, (2, 3, 4)), (4, (5, 6))]: b #@ ''') inferred = next(node.infer()) assert isinstance(inferred, nodes.List) assert inferred.as_string() == '[3, 4]' node = extract_node(''' for a, *b in [(1, 2, 3, 4)]: b #@ ''') inferred = next(node.infer()) assert 
isinstance(inferred, nodes.List) assert inferred.as_string() == '[2, 3, 4]' node = extract_node(''' for a, *b in [(1,)]: b #@ ''') inferred = next(node.infer()) assert isinstance(inferred, nodes.List) assert inferred.as_string() == '[]' def test_slice_inference_in_for_loops_not_working(): ast_nodes = extract_node(''' from unknown import Unknown for a, *b in something: b #@ for a, *b in Unknown: b #@ for a, *b in (1): b #@ ''') for node in ast_nodes: inferred = next(node.infer()) assert inferred == util.Uninferable def test_unpacking_starred_and_dicts_in_assignment(): node = extract_node(''' a, *b = {1:2, 2:3, 3:4} b ''') inferred = next(node.infer()) assert isinstance(inferred, nodes.List) assert inferred.as_string() == '[2, 3]' node = extract_node(''' a, *b = {1:2} b ''') inferred = next(node.infer()) assert isinstance(inferred, nodes.List) assert inferred.as_string() == '[]' def test_unpacking_starred_empty_list_in_assignment(): node = extract_node(''' a, *b, c = [1, 2] b #@ ''') inferred = next(node.infer()) assert isinstance(inferred, nodes.List) assert inferred.as_string() == '[]' def test_regression_infinite_loop_decorator(): """Make sure decorators with the same names as a decorated method do not cause an infinite loop See https://github.com/PyCQA/astroid/issues/375 """ code = """ from functools import lru_cache class Foo(): @lru_cache() def lru_cache(self, value): print('Computing {}'.format(value)) return value Foo().lru_cache(1) """ node = extract_node(code) [result] = node.inferred() assert result.value == 1 def test_call_on_instance_with_inherited_dunder_call_method(): """Stop inherited __call__ method from incorrectly returning wrong class See https://github.com/PyCQA/pylint/issues/2199 """ node = extract_node(""" class Base: def __call__(self): return self class Sub(Base): pass obj = Sub() val = obj() val #@ """) [val] = node.inferred() assert isinstance(val, Instance) assert val.name == "Sub" class TestInferencePropagation: """Make sure function argument values are properly propagated to sub functions""" def test_call_context_propagation(self): n = extract_node(""" def chest(a): return a * a def best(a, b): return chest(a) def test(a, b, c): return best(a, b) test(4, 5, 6) #@ """) assert next(n.infer()).as_string() == "16" def test_call_starargs_propagation(self): code = """ def foo(*args): return args def bar(*args): return foo(*args) bar(4, 5, 6, 7) #@ """ assert next(extract_node(code).infer()).as_string() == "(4, 5, 6, 7)" def test_call_kwargs_propagation(self): code = """ def b(**kwargs): return kwargs def f(**kwargs): return b(**kwargs) f(**{'f': 1}) #@ """ assert next(extract_node(code).infer()).as_string() == "{'f': 1}" def test_limit_inference_result_amount(): """Test setting limit inference result amount""" code = """ args = [] if True: args += ['a'] if True: args += ['b'] if True: args += ['c'] if True: args += ['d'] args #@ """ result = extract_node(code).inferred() assert len(result) == 16 with patch('astroid.node_classes.MANAGER.max_inferable_values', 4): result_limited = extract_node(code).inferred() # Can't guarentee exact size assert len(result_limited) < 16 # Will not always be at the end assert util.Uninferable in result_limited def test_attribute_inference_should_not_access_base_classes(): """attributes of classes should mask ancestor attribues""" code = """ type.__new__ #@ """ res = extract_node(code).inferred() assert len(res) == 1 assert res[0].parent.name == "type" def test_attribute_mro_object_inference(): """ Inference should only infer results from 
the first available method """ inferred = extract_node(""" class A: def foo(self): return 1 class B(A): def foo(self): return 2 B().foo() #@ """).inferred() assert len(inferred) == 1 assert inferred[0].value == 2 if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_lookup.py0000644000076500000240000003041613324063433022366 0ustar claudiustaff00000000000000# Copyright (c) 2007-2013 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2010 Daniel Harding # Copyright (c) 2014-2016, 2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. # Copyright (c) 2015-2016 Ceridwen # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """tests for the astroid variable lookup capabilities """ import functools import sys import unittest from astroid import builder from astroid import exceptions from astroid import nodes from astroid import scoped_nodes from astroid import util from astroid.tests import resources class LookupTest(resources.SysPathSetup, unittest.TestCase): def setUp(self): super(LookupTest, self).setUp() self.module = resources.build_file('data/module.py', 'data.module') self.module2 = resources.build_file('data/module2.py', 'data.module2') self.nonregr = resources.build_file('data/nonregr.py', 'data.nonregr') def test_limit(self): code = ''' l = [a for a,b in list] a = 1 b = a a = None def func(): c = 1 ''' astroid = builder.parse(code, __name__) # a & b a = next(astroid.nodes_of_class(nodes.Name)) self.assertEqual(a.lineno, 2) if sys.version_info < (3, 0): self.assertEqual(len(astroid.lookup('b')[1]), 1) self.assertEqual(len(astroid.lookup('a')[1]), 1) b = astroid.locals['b'][1] else: self.assertEqual(len(astroid.lookup('b')[1]), 1) self.assertEqual(len(astroid.lookup('a')[1]), 1) b = astroid.locals['b'][0] stmts = a.lookup('a')[1] self.assertEqual(len(stmts), 1) self.assertEqual(b.lineno, 6) b_infer = b.infer() b_value = next(b_infer) self.assertEqual(b_value.value, 1) # c self.assertRaises(StopIteration, functools.partial(next, b_infer)) func = astroid.locals['func'][0] self.assertEqual(len(func.lookup('c')[1]), 1) def test_module(self): astroid = builder.parse('pass', __name__) # built-in objects none = next(astroid.ilookup('None')) self.assertIsNone(none.value) obj = next(astroid.ilookup('object')) self.assertIsInstance(obj, nodes.ClassDef) self.assertEqual(obj.name, 'object') self.assertRaises(exceptions.InferenceError, functools.partial(next, astroid.ilookup('YOAA'))) # XXX self.assertEqual(len(list(self.nonregr.ilookup('enumerate'))), 2) def test_class_ancestor_name(self): code = ''' class A: pass class A(A): pass ''' astroid = builder.parse(code, __name__) cls1 = astroid.locals['A'][0] cls2 = astroid.locals['A'][1] name = next(cls2.nodes_of_class(nodes.Name)) self.assertEqual(next(name.infer()), cls1) ### backport those test to inline code def test_method(self): method = self.module['YOUPI']['method'] my_dict = next(method.ilookup('MY_DICT')) self.assertTrue(isinstance(my_dict, nodes.Dict), my_dict) none = next(method.ilookup('None')) self.assertIsNone(none.value) self.assertRaises(exceptions.InferenceError, functools.partial(next, method.ilookup('YOAA'))) def test_function_argument_with_default(self): make_class = self.module2['make_class'] base = next(make_class.ilookup('base')) self.assertTrue(isinstance(base, nodes.ClassDef), base.__class__) self.assertEqual(base.name, 'YO') self.assertEqual(base.root().name, 'data.module') def test_class(self): klass = 
self.module['YOUPI'] my_dict = next(klass.ilookup('MY_DICT')) self.assertIsInstance(my_dict, nodes.Dict) none = next(klass.ilookup('None')) self.assertIsNone(none.value) obj = next(klass.ilookup('object')) self.assertIsInstance(obj, nodes.ClassDef) self.assertEqual(obj.name, 'object') self.assertRaises(exceptions.InferenceError, functools.partial(next, klass.ilookup('YOAA'))) def test_inner_classes(self): ddd = list(self.nonregr['Ccc'].ilookup('Ddd')) self.assertEqual(ddd[0].name, 'Ddd') def test_loopvar_hiding(self): astroid = builder.parse(""" x = 10 for x in range(5): print (x) if x > 0: print ('#' * x) """, __name__) xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'x'] # inside the loop, only one possible assignment self.assertEqual(len(xnames[0].lookup('x')[1]), 1) # outside the loop, two possible assignments self.assertEqual(len(xnames[1].lookup('x')[1]), 2) self.assertEqual(len(xnames[2].lookup('x')[1]), 2) def test_list_comps(self): astroid = builder.parse(""" print ([ i for i in range(10) ]) print ([ i for i in range(10) ]) print ( list( i for i in range(10) ) ) """, __name__) xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'i'] self.assertEqual(len(xnames[0].lookup('i')[1]), 1) self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) self.assertEqual(len(xnames[1].lookup('i')[1]), 1) self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) self.assertEqual(len(xnames[2].lookup('i')[1]), 1) self.assertEqual(xnames[2].lookup('i')[1][0].lineno, 4) def test_list_comp_target(self): """test the list comprehension target""" astroid = builder.parse(""" ten = [ var for var in range(10) ] var """) var = astroid.body[1].value if sys.version_info < (3, 0): self.assertEqual(var.inferred(), [util.Uninferable]) else: self.assertRaises(exceptions.NameInferenceError, var.inferred) def test_dict_comps(self): astroid = builder.parse(""" print ({ i: j for i in range(10) for j in range(10) }) print ({ i: j for i in range(10) for j in range(10) }) """, __name__) xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'i'] self.assertEqual(len(xnames[0].lookup('i')[1]), 1) self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) self.assertEqual(len(xnames[1].lookup('i')[1]), 1) self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'j'] self.assertEqual(len(xnames[0].lookup('i')[1]), 1) self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) self.assertEqual(len(xnames[1].lookup('i')[1]), 1) self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) def test_set_comps(self): astroid = builder.parse(""" print ({ i for i in range(10) }) print ({ i for i in range(10) }) """, __name__) xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'i'] self.assertEqual(len(xnames[0].lookup('i')[1]), 1) self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) self.assertEqual(len(xnames[1].lookup('i')[1]), 1) self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) def test_set_comp_closure(self): astroid = builder.parse(""" ten = { var for var in range(10) } var """) var = astroid.body[1].value self.assertRaises(exceptions.NameInferenceError, var.inferred) def test_generator_attributes(self): tree = builder.parse(""" def count(): "test" yield 0 iterer = count() num = iterer.next() """) next_node = tree.body[2].value.func gener = next_node.expr.inferred()[0] if sys.version_info < (3, 0): self.assertIsInstance(gener.getattr('next')[0], nodes.FunctionDef) else: 
self.assertIsInstance(gener.getattr('__next__')[0], nodes.FunctionDef) self.assertIsInstance(gener.getattr('send')[0], nodes.FunctionDef) self.assertIsInstance(gener.getattr('throw')[0], nodes.FunctionDef) self.assertIsInstance(gener.getattr('close')[0], nodes.FunctionDef) def test_explicit___name__(self): code = ''' class Pouet: __name__ = "pouet" p1 = Pouet() class PouetPouet(Pouet): pass p2 = Pouet() class NoName: pass p3 = NoName() ''' astroid = builder.parse(code, __name__) p1 = next(astroid['p1'].infer()) self.assertTrue(p1.getattr('__name__')) p2 = next(astroid['p2'].infer()) self.assertTrue(p2.getattr('__name__')) self.assertTrue(astroid['NoName'].getattr('__name__')) p3 = next(astroid['p3'].infer()) self.assertRaises(exceptions.AttributeInferenceError, p3.getattr, '__name__') def test_function_module_special(self): astroid = builder.parse(''' def initialize(linter): """initialize linter with checkers in this package """ package_load(linter, __path__[0]) ''', 'data.__init__') path = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == '__path__'][0] self.assertEqual(len(path.lookup('__path__')[1]), 1) def test_builtin_lookup(self): self.assertEqual(scoped_nodes.builtin_lookup('__dict__')[1], ()) intstmts = scoped_nodes.builtin_lookup('int')[1] self.assertEqual(len(intstmts), 1) self.assertIsInstance(intstmts[0], nodes.ClassDef) self.assertEqual(intstmts[0].name, 'int') # pylint: disable=no-member; Infers two potential values self.assertIs(intstmts[0], nodes.const_factory(1)._proxied) def test_decorator_arguments_lookup(self): code = ''' def decorator(value): def wrapper(function): return function return wrapper class foo: member = 10 #@ @decorator(member) #This will cause pylint to complain def test(self): pass ''' member = builder.extract_node(code, __name__).targets[0] it = member.infer() obj = next(it) self.assertIsInstance(obj, nodes.Const) self.assertEqual(obj.value, 10) self.assertRaises(StopIteration, functools.partial(next, it)) def test_inner_decorator_member_lookup(self): code = ''' class FileA: def decorator(bla): return bla @__(decorator) def funcA(): return 4 ''' decname = builder.extract_node(code, __name__) it = decname.infer() obj = next(it) self.assertIsInstance(obj, nodes.FunctionDef) self.assertRaises(StopIteration, functools.partial(next, it)) def test_static_method_lookup(self): code = ''' class FileA: @staticmethod def funcA(): return 4 class Test: FileA = [1,2,3] def __init__(self): print (FileA.funcA()) ''' astroid = builder.parse(code, __name__) it = astroid['Test']['__init__'].ilookup('FileA') obj = next(it) self.assertIsInstance(obj, nodes.ClassDef) self.assertRaises(StopIteration, functools.partial(next, it)) def test_global_delete(self): code = ''' def run2(): f = Frobble() class Frobble: pass Frobble.mumble = True del Frobble def run1(): f = Frobble() ''' astroid = builder.parse(code, __name__) stmts = astroid['run2'].lookup('Frobbel')[1] self.assertEqual(len(stmts), 0) stmts = astroid['run1'].lookup('Frobbel')[1] self.assertEqual(len(stmts), 0) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_manager.py0000644000076500000240000002716213324063433022473 0ustar claudiustaff00000000000000# Copyright (c) 2006, 2009-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2013 AndroWiiid # Copyright (c) 2014-2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. 
# Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2017 Chris Philip # Copyright (c) 2017 Hugo # Copyright (c) 2017 ioanatia # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import os import platform import site import sys import unittest import pkg_resources import six import astroid from astroid import exceptions from astroid import manager from astroid.tests import resources BUILTINS = six.moves.builtins.__name__ def _get_file_from_object(obj): if platform.python_implementation() == 'Jython': return obj.__file__.split("$py.class")[0] + ".py" if sys.version_info >= (3, 4): return obj.__file__ if not obj.__file__.endswith(".py"): return obj.__file__[:-1] return obj.__file__ class AstroidManagerTest(resources.SysPathSetup, resources.AstroidCacheSetupMixin, unittest.TestCase): def setUp(self): super(AstroidManagerTest, self).setUp() self.manager = manager.AstroidManager() self.manager.clear_cache(self._builtins) # take care of borg def test_ast_from_file(self): filepath = unittest.__file__ ast = self.manager.ast_from_file(filepath) self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_file_cache(self): filepath = unittest.__file__ self.manager.ast_from_file(filepath) ast = self.manager.ast_from_file('unhandledName', 'unittest') self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_file_astro_builder(self): filepath = unittest.__file__ ast = self.manager.ast_from_file(filepath, None, True, True) self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_file_name_astro_builder_exception(self): self.assertRaises(exceptions.AstroidBuildingError, self.manager.ast_from_file, 'unhandledName') def test_do_not_expose_main(self): obj = self.manager.ast_from_module_name('__main__') self.assertEqual(obj.name, '__main__') self.assertEqual(obj.items(), []) def test_ast_from_module_name(self): ast = self.manager.ast_from_module_name('unittest') self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_module_name_not_python_source(self): ast = self.manager.ast_from_module_name('time') self.assertEqual(ast.name, 'time') self.assertIn('time', self.manager.astroid_cache) self.assertEqual(ast.pure_python, False) def test_ast_from_module_name_astro_builder_exception(self): self.assertRaises(exceptions.AstroidBuildingError, self.manager.ast_from_module_name, 'unhandledModule') def _test_ast_from_old_namespace_package_protocol(self, root): origpath = sys.path[:] paths = [resources.find('data/path_{}_{}'.format(root, index)) for index in range(1, 4)] sys.path.extend(paths) try: for name in ('foo', 'bar', 'baz'): module = self.manager.ast_from_module_name('package.' 
+ name) self.assertIsInstance(module, astroid.Module) finally: sys.path = origpath def test_ast_from_namespace_pkgutil(self): self._test_ast_from_old_namespace_package_protocol('pkgutil') def test_ast_from_namespace_pkg_resources(self): self._test_ast_from_old_namespace_package_protocol('pkg_resources') @unittest.skipUnless(sys.version_info[:2] >= (3, 4), "Needs PEP 420 namespace protocol") def test_implicit_namespace_package(self): data_dir = os.path.dirname(resources.find('data/namespace_pep_420')) contribute = os.path.join(data_dir, 'contribute_to_namespace') for value in (data_dir, contribute): sys.path.insert(0, value) try: module = self.manager.ast_from_module_name('namespace_pep_420.module') self.assertIsInstance(module, astroid.Module) self.assertEqual(module.name, 'namespace_pep_420.module') var = next(module.igetattr('var')) self.assertIsInstance(var, astroid.Const) self.assertEqual(var.value, 42) finally: for _ in range(2): sys.path.pop(0) def test_namespace_package_pth_support(self): pth = 'foogle_fax-0.12.5-py2.7-nspkg.pth' site.addpackage(resources.RESOURCE_PATH, pth, []) pkg_resources._namespace_packages['foogle'] = [] try: module = self.manager.ast_from_module_name('foogle.fax') submodule = next(module.igetattr('a')) value = next(submodule.igetattr('x')) self.assertIsInstance(value, astroid.Const) with self.assertRaises(exceptions.AstroidImportError): self.manager.ast_from_module_name('foogle.moogle') finally: del pkg_resources._namespace_packages['foogle'] sys.modules.pop('foogle') def test_nested_namespace_import(self): pth = 'foogle_fax-0.12.5-py2.7-nspkg.pth' site.addpackage(resources.RESOURCE_PATH, pth, []) pkg_resources._namespace_packages['foogle'] = ['foogle.crank'] pkg_resources._namespace_packages['foogle.crank'] = [] try: self.manager.ast_from_module_name('foogle.crank') finally: del pkg_resources._namespace_packages['foogle'] sys.modules.pop('foogle') def test_namespace_and_file_mismatch(self): filepath = unittest.__file__ ast = self.manager.ast_from_file(filepath) self.assertEqual(ast.name, 'unittest') pth = 'foogle_fax-0.12.5-py2.7-nspkg.pth' site.addpackage(resources.RESOURCE_PATH, pth, []) pkg_resources._namespace_packages['foogle'] = [] try: with self.assertRaises(exceptions.AstroidImportError): self.manager.ast_from_module_name('unittest.foogle.fax') finally: del pkg_resources._namespace_packages['foogle'] sys.modules.pop('foogle') def _test_ast_from_zip(self, archive): origpath = sys.path[:] sys.modules.pop('mypypa', None) archive_path = resources.find(archive) sys.path.insert(0, archive_path) try: module = self.manager.ast_from_module_name('mypypa') self.assertEqual(module.name, 'mypypa') end = os.path.join(archive, 'mypypa') self.assertTrue(module.file.endswith(end), "%s doesn't endswith %s" % (module.file, end)) finally: # remove the module, else after importing egg, we don't get the zip if 'mypypa' in self.manager.astroid_cache: del self.manager.astroid_cache['mypypa'] del self.manager._mod_file_cache[('mypypa', None)] if archive_path in sys.path_importer_cache: del sys.path_importer_cache[archive_path] sys.path = origpath def test_ast_from_module_name_egg(self): self._test_ast_from_zip( os.path.sep.join(['data', os.path.normcase('MyPyPa-0.1.0-py2.5.egg')]) ) def test_ast_from_module_name_zip(self): self._test_ast_from_zip( os.path.sep.join(['data', os.path.normcase('MyPyPa-0.1.0-py2.5.zip')]) ) def test_zip_import_data(self): """check if zip_import_data works""" filepath = resources.find('data/MyPyPa-0.1.0-py2.5.zip/mypypa') ast = 
self.manager.zip_import_data(filepath) self.assertEqual(ast.name, 'mypypa') def test_zip_import_data_without_zipimport(self): """check if zip_import_data return None without zipimport""" self.assertEqual(self.manager.zip_import_data('path'), None) def test_file_from_module(self): """check if the unittest filepath is equals to the result of the method""" self.assertEqual( _get_file_from_object(unittest), self.manager.file_from_module_name('unittest', None).location) def test_file_from_module_name_astro_building_exception(self): """check if the method launch a exception with a wrong module name""" self.assertRaises(exceptions.AstroidBuildingError, self.manager.file_from_module_name, 'unhandledModule', None) def test_ast_from_module(self): ast = self.manager.ast_from_module(unittest) self.assertEqual(ast.pure_python, True) import time ast = self.manager.ast_from_module(time) self.assertEqual(ast.pure_python, False) def test_ast_from_module_cache(self): """check if the module is in the cache manager""" ast = self.manager.ast_from_module(unittest) self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_class(self): ast = self.manager.ast_from_class(int) self.assertEqual(ast.name, 'int') self.assertEqual(ast.parent.frame().name, BUILTINS) ast = self.manager.ast_from_class(object) self.assertEqual(ast.name, 'object') self.assertEqual(ast.parent.frame().name, BUILTINS) self.assertIn('__setattr__', ast) def test_ast_from_class_with_module(self): """check if the method works with the module name""" ast = self.manager.ast_from_class(int, int.__module__) self.assertEqual(ast.name, 'int') self.assertEqual(ast.parent.frame().name, BUILTINS) ast = self.manager.ast_from_class(object, object.__module__) self.assertEqual(ast.name, 'object') self.assertEqual(ast.parent.frame().name, BUILTINS) self.assertIn('__setattr__', ast) def test_ast_from_class_attr_error(self): """give a wrong class at the ast_from_class method""" self.assertRaises(exceptions.AstroidBuildingError, self.manager.ast_from_class, None) def testFailedImportHooks(self): def hook(modname): if modname == 'foo.bar': return unittest raise exceptions.AstroidBuildingError() with self.assertRaises(exceptions.AstroidBuildingError): self.manager.ast_from_module_name('foo.bar') self.manager.register_failed_import_hook(hook) self.assertEqual(unittest, self.manager.ast_from_module_name('foo.bar')) with self.assertRaises(exceptions.AstroidBuildingError): self.manager.ast_from_module_name('foo.bar.baz') del self.manager._failed_import_hooks[0] class BorgAstroidManagerTC(unittest.TestCase): def test_borg(self): """test that the AstroidManager is really a borg, i.e. that two different instances has same cache""" first_manager = manager.AstroidManager() built = first_manager.ast_from_module_name(BUILTINS) second_manager = manager.AstroidManager() second_built = second_manager.ast_from_module_name(BUILTINS) self.assertIs(built, second_built) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_modutils.py0000644000076500000240000003136313324063433022717 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2014-2016 Claudiu Popa # Copyright (c) 2014 Google, Inc. # Copyright (c) 2014 LOGILAB S.A. 
(Paris, FRANCE) # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2015 Radosław Ganczarek # Copyright (c) 2016 Ceridwen # Copyright (c) 2018 Mario Corchero # Copyright (c) 2018 Mario Corchero # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """ unit tests for module modutils (module manipulation utilities) """ import email import os import sys import unittest from xml import etree import tempfile import shutil import astroid from astroid.interpreter._import import spec from astroid import modutils from astroid.tests import resources def _get_file_from_object(obj): return modutils._path_from_filename(obj.__file__) class ModuleFileTest(unittest.TestCase): package = "mypypa" def tearDown(self): for k in list(sys.path_importer_cache): if 'MyPyPa' in k: del sys.path_importer_cache[k] def test_find_zipped_module(self): found_spec = spec.find_spec( [self.package], [resources.find('data/MyPyPa-0.1.0-py2.5.zip')]) self.assertEqual(found_spec.type, spec.ModuleType.PY_ZIPMODULE) self.assertEqual(found_spec.location.split(os.sep)[-3:], ["data", "MyPyPa-0.1.0-py2.5.zip", self.package]) def test_find_egg_module(self): found_spec = spec.find_spec( [self.package], [resources.find('data/MyPyPa-0.1.0-py2.5.egg')]) self.assertEqual(found_spec.type, spec.ModuleType.PY_ZIPMODULE) self.assertEqual(found_spec.location.split(os.sep)[-3:], ["data", "MyPyPa-0.1.0-py2.5.egg", self.package]) class LoadModuleFromNameTest(unittest.TestCase): """ load a python module from it's name """ def test_knownValues_load_module_from_name_1(self): self.assertEqual(modutils.load_module_from_name('sys'), sys) def test_knownValues_load_module_from_name_2(self): self.assertEqual(modutils.load_module_from_name('os.path'), os.path) def test_raise_load_module_from_name_1(self): self.assertRaises(ImportError, modutils.load_module_from_name, 'os.path', use_sys=0) class GetModulePartTest(unittest.TestCase): """given a dotted name return the module part of the name""" def test_knownValues_get_module_part_1(self): self.assertEqual(modutils.get_module_part('astroid.modutils'), 'astroid.modutils') def test_knownValues_get_module_part_2(self): self.assertEqual(modutils.get_module_part('astroid.modutils.get_module_part'), 'astroid.modutils') def test_knownValues_get_module_part_3(self): """relative import from given file""" self.assertEqual(modutils.get_module_part('node_classes.AssName', modutils.__file__), 'node_classes') def test_knownValues_get_compiled_module_part(self): self.assertEqual(modutils.get_module_part('math.log10'), 'math') self.assertEqual(modutils.get_module_part('math.log10', __file__), 'math') def test_knownValues_get_builtin_module_part(self): self.assertEqual(modutils.get_module_part('sys.path'), 'sys') self.assertEqual(modutils.get_module_part('sys.path', '__file__'), 'sys') def test_get_module_part_exception(self): self.assertRaises(ImportError, modutils.get_module_part, 'unknown.module', modutils.__file__) class ModPathFromFileTest(unittest.TestCase): """ given an absolute file path return the python module's path as a list """ def test_knownValues_modpath_from_file_1(self): from xml.etree import ElementTree self.assertEqual(modutils.modpath_from_file(ElementTree.__file__), ['xml', 'etree', 'ElementTree']) def test_knownValues_modpath_from_file_2(self): self.assertEqual(modutils.modpath_from_file('unittest_modutils.py', {os.getcwd(): 'arbitrary.pkg'}), ['arbitrary', 'pkg', 'unittest_modutils']) def 
test_raise_modpath_from_file_Exception(self): self.assertRaises(Exception, modutils.modpath_from_file, '/turlututu') def test_import_symlink_with_source_outside_of_path(self): with tempfile.NamedTemporaryFile() as tmpfile: linked_file_name = 'symlinked_file.py' try: os.symlink(tmpfile.name, linked_file_name) self.assertEqual(modutils.modpath_from_file(linked_file_name), ['symlinked_file']) finally: os.remove(linked_file_name) def test_import_symlink_both_outside_of_path(self): with tempfile.NamedTemporaryFile() as tmpfile: linked_file_name = os.path.join(tempfile.gettempdir(), 'symlinked_file.py') try: os.symlink(tmpfile.name, linked_file_name) self.assertRaises(ImportError, modutils.modpath_from_file, linked_file_name) finally: os.remove(linked_file_name) def test_load_from_module_symlink_on_symlinked_paths_in_syspath(self): # constants tmp = tempfile.gettempdir() deployment_path = os.path.join(tmp, 'deployment') path_to_include = os.path.join(tmp, 'path_to_include') real_secret_path = os.path.join(tmp, 'secret.py') symlink_secret_path = os.path.join(path_to_include, 'secret.py') # setup double symlink # /tmp/deployment # /tmp/path_to_include (symlink to /tmp/deployment) # /tmp/secret.py # /tmp/deployment/secret.py (points to /tmp/secret.py) os.mkdir(deployment_path) self.addCleanup(shutil.rmtree, deployment_path) os.symlink(deployment_path, path_to_include) self.addCleanup(os.remove, path_to_include) with open(real_secret_path, "w"): pass os.symlink(real_secret_path, symlink_secret_path) self.addCleanup(os.remove, real_secret_path) # add the symlinked path to sys.path sys.path.append(path_to_include) self.addCleanup(sys.path.pop) # this should be equivalent to: import secret self.assertEqual( modutils.modpath_from_file(symlink_secret_path), ['secret']) class LoadModuleFromPathTest(resources.SysPathSetup, unittest.TestCase): def test_do_not_load_twice(self): modutils.load_module_from_modpath(['data', 'lmfp', 'foo']) modutils.load_module_from_modpath(['data', 'lmfp']) # pylint: disable=no-member; just-once is added by a test file dynamically. self.assertEqual(len(sys.just_once), 1) del sys.just_once class FileFromModPathTest(resources.SysPathSetup, unittest.TestCase): """given a mod path (i.e. splited module / package name), return the corresponding file, giving priority to source file over precompiled file if it exists""" def test_site_packages(self): filename = _get_file_from_object(modutils) result = modutils.file_from_modpath(['astroid', 'modutils']) self.assertEqual(os.path.realpath(result), os.path.realpath(filename)) def test_std_lib(self): path = modutils.file_from_modpath(['os', 'path']).replace('.pyc', '.py') self.assertEqual(os.path.realpath(path), os.path.realpath(os.path.__file__.replace('.pyc', '.py'))) def test_builtin(self): self.assertIsNone(modutils.file_from_modpath(['sys'])) def test_unexisting(self): self.assertRaises(ImportError, modutils.file_from_modpath, ['turlututu']) def test_unicode_in_package_init(self): # file_from_modpath should not crash when reading an __init__ # file with unicode characters. 
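        # There is deliberately no assertion here: the call completing without
        # raising (e.g. no UnicodeDecodeError) is the whole point of the test.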
modutils.file_from_modpath(["data", "unicode_package", "core"]) class GetSourceFileTest(unittest.TestCase): def test(self): filename = _get_file_from_object(os.path) self.assertEqual(modutils.get_source_file(os.path.__file__), os.path.normpath(filename)) def test_raise(self): self.assertRaises(modutils.NoSourceFile, modutils.get_source_file, 'whatever') class StandardLibModuleTest(resources.SysPathSetup, unittest.TestCase): """ return true if the module may be considered as a module from the standard library """ def test_datetime(self): # This is an interesting example, since datetime, on pypy, # is under lib_pypy, rather than the usual Lib directory. self.assertTrue(modutils.is_standard_module('datetime')) def test_builtins(self): if sys.version_info < (3, 0): self.assertTrue(modutils.is_standard_module('__builtin__')) self.assertFalse(modutils.is_standard_module('builtins')) else: self.assertFalse(modutils.is_standard_module('__builtin__')) self.assertTrue(modutils.is_standard_module('builtins')) def test_builtin(self): self.assertTrue(modutils.is_standard_module('sys')) self.assertTrue(modutils.is_standard_module('marshal')) def test_nonstandard(self): self.assertFalse(modutils.is_standard_module('astroid')) def test_unknown(self): self.assertFalse(modutils.is_standard_module('unknown')) def test_4(self): self.assertTrue(modutils.is_standard_module('hashlib')) self.assertTrue(modutils.is_standard_module('pickle')) self.assertTrue(modutils.is_standard_module('email')) self.assertEqual(modutils.is_standard_module('io'), sys.version_info >= (2, 6)) self.assertEqual(modutils.is_standard_module('StringIO'), sys.version_info < (3, 0)) self.assertTrue(modutils.is_standard_module('unicodedata')) def test_custom_path(self): datadir = resources.find('') if datadir.startswith(modutils.EXT_LIB_DIR): self.skipTest('known breakage of is_standard_module on installed package') self.assertTrue(modutils.is_standard_module('data.module', (datadir,))) self.assertTrue(modutils.is_standard_module('data.module', (os.path.abspath(datadir),))) def test_failing_edge_cases(self): # using a subpackage/submodule path as std_path argument self.assertFalse(modutils.is_standard_module('xml.etree', etree.__path__)) # using a module + object name as modname argument self.assertTrue(modutils.is_standard_module('sys.path')) # this is because only the first package/module is considered self.assertTrue(modutils.is_standard_module('sys.whatever')) self.assertFalse(modutils.is_standard_module('xml.whatever', etree.__path__)) class IsRelativeTest(unittest.TestCase): def test_knownValues_is_relative_1(self): self.assertTrue(modutils.is_relative('utils', email.__path__[0])) def test_knownValues_is_relative_2(self): self.assertTrue(modutils.is_relative('ElementPath', etree.ElementTree.__file__)) def test_knownValues_is_relative_3(self): self.assertFalse(modutils.is_relative('astroid', astroid.__path__[0])) class GetModuleFilesTest(unittest.TestCase): def test_get_module_files_1(self): package = resources.find('data/find_test') modules = set(modutils.get_module_files(package, [])) expected = ['__init__.py', 'module.py', 'module2.py', 'noendingnewline.py', 'nonregr.py'] self.assertEqual(modules, {os.path.join(package, x) for x in expected}) def test_get_all_files(self): """test that list_all returns all Python files from given location """ non_package = resources.find('data/notamodule') modules = modutils.get_module_files(non_package, [], list_all=True) self.assertEqual( modules, [os.path.join(non_package, 'file.py')], ) def 
test_load_module_set_attribute(self): import xml.etree.ElementTree import xml del xml.etree.ElementTree del sys.modules['xml.etree.ElementTree'] m = modutils.load_module_from_modpath(['xml', 'etree', 'ElementTree']) self.assertTrue(hasattr(xml, 'etree')) self.assertTrue(hasattr(xml.etree, 'ElementTree')) self.assertTrue(m is xml.etree.ElementTree) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_nodes.py0000644000076500000240000010031613324064661022166 0ustar claudiustaff00000000000000# Copyright (c) 2006-2007, 2009-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2012 FELD Boris # Copyright (c) 2013-2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2017 rr- # Copyright (c) 2017 Derek Gustafson # Copyright (c) 2018 brendanator # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 Anthony Sottile # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """tests for specific behaviour of astroid nodes """ import os import sys import textwrap import unittest import copy import pytest import six import astroid from astroid import bases from astroid import builder from astroid import context as contextmod from astroid import exceptions from astroid import node_classes from astroid import nodes from astroid import parse from astroid import util from astroid import test_utils from astroid import transforms from astroid.tests import resources abuilder = builder.AstroidBuilder() BUILTINS = six.moves.builtins.__name__ try: import typed_ast # pylint: disable=unused-import HAS_TYPED_AST = True except ImportError: HAS_TYPED_AST = False class AsStringTest(resources.SysPathSetup, unittest.TestCase): def test_tuple_as_string(self): def build(string): return abuilder.string_build(string).body[0].value self.assertEqual(build('1,').as_string(), '(1, )') self.assertEqual(build('1, 2, 3').as_string(), '(1, 2, 3)') self.assertEqual(build('(1, )').as_string(), '(1, )') self.assertEqual(build('1, 2, 3').as_string(), '(1, 2, 3)') @test_utils.require_version(minver='3.0') def test_func_signature_issue_185(self): code = textwrap.dedent(''' def test(a, b, c=42, *, x=42, **kwargs): print(a, b, c, args) ''') node = parse(code) self.assertEqual(node.as_string().strip(), code.strip()) def test_as_string_for_list_containing_uninferable(self): node = builder.extract_node(''' def foo(): bar = [arg] * 1 ''') binop = node.body[0].value inferred = next(binop.infer()) self.assertEqual(inferred.as_string(), '[Uninferable]') self.assertEqual(binop.as_string(), '[arg] * 1') def test_frozenset_as_string(self): ast_nodes = builder.extract_node(''' frozenset((1, 2, 3)) #@ frozenset({1, 2, 3}) #@ frozenset([1, 2, 3,]) #@ frozenset(None) #@ frozenset(1) #@ ''') ast_nodes = [next(node.infer()) for node in ast_nodes] self.assertEqual(ast_nodes[0].as_string(), 'frozenset((1, 2, 3))') self.assertEqual(ast_nodes[1].as_string(), 'frozenset({1, 2, 3})') self.assertEqual(ast_nodes[2].as_string(), 'frozenset([1, 2, 3])') self.assertNotEqual(ast_nodes[3].as_string(), 'frozenset(None)') self.assertNotEqual(ast_nodes[4].as_string(), 'frozenset(1)') def test_varargs_kwargs_as_string(self): ast = abuilder.string_build('raise_string(*args, **kwargs)').body[0] self.assertEqual(ast.as_string(), 'raise_string(*args, **kwargs)') def test_module_as_string(self): 
"""check as_string on a whole module prepared to be returned identically """ module = resources.build_file('data/module.py', 'data.module') with open(resources.find('data/module.py'), 'r') as fobj: self.assertMultiLineEqual(module.as_string(), fobj.read()) def test_module2_as_string(self): """check as_string on a whole module prepared to be returned identically """ module2 = resources.build_file('data/module2.py', 'data.module2') with open(resources.find('data/module2.py'), 'r') as fobj: self.assertMultiLineEqual(module2.as_string(), fobj.read()) def test_as_string(self): """check as_string for python syntax >= 2.7""" code = '''one_two = {1, 2} b = {v: k for (k, v) in enumerate('string')} cdd = {k for k in b}\n\n''' ast = abuilder.string_build(code) self.assertMultiLineEqual(ast.as_string(), code) @test_utils.require_version('3.0') def test_3k_as_string(self): """check as_string for python 3k syntax""" code = '''print() def function(var): nonlocal counter try: hello except NameError as nexc: (*hell, o) = b'hello' raise AttributeError from nexc \n''' ast = abuilder.string_build(code) self.assertEqual(ast.as_string(), code) @test_utils.require_version('3.0') @unittest.expectedFailure def test_3k_annotations_and_metaclass(self): code_annotations = textwrap.dedent(''' def function(var:int): nonlocal counter class Language(metaclass=Natural): """natural language""" ''') ast = abuilder.string_build(code_annotations) self.assertEqual(ast.as_string(), code_annotations) def test_ellipsis(self): ast = abuilder.string_build('a[...]').body[0] self.assertEqual(ast.as_string(), 'a[...]') def test_slices(self): for code in ('a[0]', 'a[1:3]', 'a[:-1:step]', 'a[:,newaxis]', 'a[newaxis,:]', 'del L[::2]', 'del A[1]', 'del Br[:]'): ast = abuilder.string_build(code).body[0] self.assertEqual(ast.as_string(), code) def test_slice_and_subscripts(self): code = """a[:1] = bord[2:] a[:1] = bord[2:] del bree[3:d] bord[2:] del av[d::f], a[df:] a[:1] = bord[2:] del SRC[::1,newaxis,1:] tous[vals] = 1010 del thousand[key] del a[::2], a[:-1:step] del Fee.form[left:] aout.vals = miles.of_stuff del (ccok, (name.thing, foo.attrib.value)), Fee.form[left:] if all[1] == bord[0:]: pass\n\n""" ast = abuilder.string_build(code) self.assertEqual(ast.as_string(), code) def test_operator_precedence(self): with open(resources.find('data/operator_precedence.py')) as f: for code in f: self.check_as_string_ast_equality(code) @staticmethod def check_as_string_ast_equality(code): """ Check that as_string produces source code with exactly the same semantics as the source it was originally parsed from """ pre = builder.parse(code) post = builder.parse(pre.as_string()) pre_repr = pre.repr_tree() post_repr = post.repr_tree() assert pre_repr == post_repr assert pre.as_string().strip() == code.strip() class _NodeTest(unittest.TestCase): """test transformation of If Node""" CODE = None @property def astroid(self): try: return self.__class__.__dict__['CODE_Astroid'] except KeyError: module = builder.parse(self.CODE) self.__class__.CODE_Astroid = module return module class IfNodeTest(_NodeTest): """test transformation of If Node""" CODE = """ if 0: print() if True: print() else: pass if "": print() elif []: raise if 1: print() elif True: print() elif func(): pass else: raise """ def test_if_elif_else_node(self): """test transformation for If node""" self.assertEqual(len(self.astroid.body), 4) for stmt in self.astroid.body: self.assertIsInstance(stmt, nodes.If) self.assertFalse(self.astroid.body[0].orelse) # simple If 
self.assertIsInstance(self.astroid.body[1].orelse[0], nodes.Pass) # If / else self.assertIsInstance(self.astroid.body[2].orelse[0], nodes.If) # If / elif self.assertIsInstance(self.astroid.body[3].orelse[0].orelse[0], nodes.If) def test_block_range(self): # XXX ensure expected values self.assertEqual(self.astroid.block_range(1), (0, 22)) self.assertEqual(self.astroid.block_range(10), (0, 22)) # XXX (10, 22) ? self.assertEqual(self.astroid.body[1].block_range(5), (5, 6)) self.assertEqual(self.astroid.body[1].block_range(6), (6, 6)) self.assertEqual(self.astroid.body[1].orelse[0].block_range(7), (7, 8)) self.assertEqual(self.astroid.body[1].orelse[0].block_range(8), (8, 8)) class TryExceptNodeTest(_NodeTest): CODE = """ try: print ('pouet') except IOError: pass except UnicodeError: print() else: print() """ def test_block_range(self): # XXX ensure expected values self.assertEqual(self.astroid.body[0].block_range(1), (1, 8)) self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) self.assertEqual(self.astroid.body[0].block_range(3), (3, 8)) self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) self.assertEqual(self.astroid.body[0].block_range(5), (5, 5)) self.assertEqual(self.astroid.body[0].block_range(6), (6, 6)) self.assertEqual(self.astroid.body[0].block_range(7), (7, 7)) self.assertEqual(self.astroid.body[0].block_range(8), (8, 8)) class TryFinallyNodeTest(_NodeTest): CODE = """ try: print ('pouet') finally: print ('pouet') """ def test_block_range(self): # XXX ensure expected values self.assertEqual(self.astroid.body[0].block_range(1), (1, 4)) self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) self.assertEqual(self.astroid.body[0].block_range(3), (3, 4)) self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) class TryExceptFinallyNodeTest(_NodeTest): CODE = """ try: print('pouet') except Exception: print ('oops') finally: print ('pouet') """ def test_block_range(self): # XXX ensure expected values self.assertEqual(self.astroid.body[0].block_range(1), (1, 6)) self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) self.assertEqual(self.astroid.body[0].block_range(3), (3, 4)) self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) self.assertEqual(self.astroid.body[0].block_range(5), (5, 5)) self.assertEqual(self.astroid.body[0].block_range(6), (6, 6)) @unittest.skipIf(six.PY3, "Python 2 specific test.") class TryExcept2xNodeTest(_NodeTest): CODE = """ try: hello except AttributeError, (retval, desc): pass """ def test_tuple_attribute(self): handler = self.astroid.body[0].handlers[0] self.assertIsInstance(handler.name, nodes.Tuple) class ImportNodeTest(resources.SysPathSetup, unittest.TestCase): def setUp(self): super(ImportNodeTest, self).setUp() self.module = resources.build_file('data/module.py', 'data.module') self.module2 = resources.build_file('data/module2.py', 'data.module2') def test_import_self_resolve(self): myos = next(self.module2.igetattr('myos')) self.assertTrue(isinstance(myos, nodes.Module), myos) self.assertEqual(myos.name, 'os') self.assertEqual(myos.qname(), 'os') self.assertEqual(myos.pytype(), '%s.module' % BUILTINS) def test_from_self_resolve(self): namenode = next(self.module.igetattr('NameNode')) self.assertTrue(isinstance(namenode, nodes.ClassDef), namenode) self.assertEqual(namenode.root().name, 'astroid.node_classes') self.assertEqual(namenode.qname(), 'astroid.node_classes.Name') self.assertEqual(namenode.pytype(), '%s.type' % BUILTINS) abspath = next(self.module2.igetattr('abspath')) 
self.assertTrue(isinstance(abspath, nodes.FunctionDef), abspath) self.assertEqual(abspath.root().name, 'os.path') self.assertEqual(abspath.qname(), 'os.path.abspath') self.assertEqual(abspath.pytype(), '%s.function' % BUILTINS) def test_real_name(self): from_ = self.module['NameNode'] self.assertEqual(from_.real_name('NameNode'), 'Name') imp_ = self.module['os'] self.assertEqual(imp_.real_name('os'), 'os') self.assertRaises(exceptions.AttributeInferenceError, imp_.real_name, 'os.path') imp_ = self.module['NameNode'] self.assertEqual(imp_.real_name('NameNode'), 'Name') self.assertRaises(exceptions.AttributeInferenceError, imp_.real_name, 'Name') imp_ = self.module2['YO'] self.assertEqual(imp_.real_name('YO'), 'YO') self.assertRaises(exceptions.AttributeInferenceError, imp_.real_name, 'data') def test_as_string(self): ast = self.module['modutils'] self.assertEqual(ast.as_string(), "from astroid import modutils") ast = self.module['NameNode'] self.assertEqual(ast.as_string(), "from astroid.node_classes import Name as NameNode") ast = self.module['os'] self.assertEqual(ast.as_string(), "import os.path") code = """from . import here from .. import door from .store import bread from ..cave import wine\n\n""" ast = abuilder.string_build(code) self.assertMultiLineEqual(ast.as_string(), code) def test_bad_import_inference(self): # Explanation of the bug '''When we import PickleError from nonexistent, a call to the infer method of this From node will be made by unpack_infer. inference.infer_from will try to import this module, which will fail and raise an InferenceException (by mixins.do_import_module). The infer_name will catch this exception and yield an Uninferable instead. ''' code = ''' try: from pickle import PickleError except ImportError: from nonexistent import PickleError try: pass except PickleError: pass ''' module = builder.parse(code) handler_type = module.body[1].handlers[0].type excs = list(node_classes.unpack_infer(handler_type)) # The number of returned objects can differ on Python 2 # and Python 3. In one version, an additional item will # be returned, from the _pickle module, which is not # present in the other version.
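# (A sketch for reference, assuming the behaviour described above: the
# unpacked list starts with the real pickle.PickleError ClassDef and ends
# with Uninferable coming from the failed `nonexistent` import, roughly
#     excs == [<ClassDef.PickleError>, ..., Uninferable]
# with one Python version adding an extra _pickle entry in between, which
# is why only excs[0] and excs[-1] are asserted on below.)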
self.assertIsInstance(excs[0], nodes.ClassDef) self.assertEqual(excs[0].name, 'PickleError') self.assertIs(excs[-1], util.Uninferable) def test_absolute_import(self): module = resources.build_file('data/absimport.py') ctx = contextmod.InferenceContext() # will fail if absolute import failed ctx.lookupname = 'message' next(module['message'].infer(ctx)) ctx.lookupname = 'email' m = next(module['email'].infer(ctx)) self.assertFalse(m.file.startswith(os.path.join('data', 'email.py'))) def test_more_absolute_import(self): module = resources.build_file('data/module1abs/__init__.py', 'data.module1abs') self.assertIn('sys', module.locals) class CmpNodeTest(unittest.TestCase): def test_as_string(self): ast = abuilder.string_build("a == 2").body[0] self.assertEqual(ast.as_string(), "a == 2") class ConstNodeTest(unittest.TestCase): def _test(self, value): node = nodes.const_factory(value) # pylint: disable=no-member; Infers two potential values self.assertIsInstance(node._proxied, nodes.ClassDef) self.assertEqual(node._proxied.name, value.__class__.__name__) self.assertIs(node.value, value) self.assertTrue(node._proxied.parent) self.assertEqual(node._proxied.root().name, value.__class__.__module__) def test_none(self): self._test(None) def test_bool(self): self._test(True) def test_int(self): self._test(1) def test_float(self): self._test(1.0) def test_complex(self): self._test(1.0j) def test_str(self): self._test('a') def test_unicode(self): self._test('a') def test_copy(self): """ Make sure copying a Const object doesn't result in infinite recursion """ const = copy.copy(nodes.Const(1)) assert const.value == 1 class NameNodeTest(unittest.TestCase): def test_assign_to_True(self): """test that True and False assignments don't crash""" code = """ True = False def hello(False): pass del True """ if sys.version_info >= (3, 0): with self.assertRaises(exceptions.AstroidBuildingError): builder.parse(code) else: ast = builder.parse(code) assign_true = ast['True'] self.assertIsInstance(assign_true, nodes.AssignName) self.assertEqual(assign_true.name, "True") del_true = ast.body[2].targets[0] self.assertIsInstance(del_true, nodes.DelName) self.assertEqual(del_true.name, "True") class AnnAssignNodeTest(unittest.TestCase): @test_utils.require_version(minver='3.6') def test_primitive(self): code = textwrap.dedent(""" test: int = 5 """) assign = builder.extract_node(code) self.assertIsInstance(assign, nodes.AnnAssign) self.assertEqual(assign.target.name, "test") self.assertEqual(assign.annotation.name, "int") self.assertEqual(assign.value.value, 5) self.assertEqual(assign.simple, 1) @test_utils.require_version(minver='3.6') def test_primitive_without_initial_value(self): code = textwrap.dedent(""" test: str """) assign = builder.extract_node(code) self.assertIsInstance(assign, nodes.AnnAssign) self.assertEqual(assign.target.name, "test") self.assertEqual(assign.annotation.name, "str") self.assertEqual(assign.value, None) @test_utils.require_version(minver='3.6') def test_complex(self): code = textwrap.dedent(""" test: Dict[List[str]] = {} """) assign = builder.extract_node(code) self.assertIsInstance(assign, nodes.AnnAssign) self.assertEqual(assign.target.name, "test") self.assertIsInstance(assign.annotation, astroid.Subscript) self.assertIsInstance(assign.value, astroid.Dict) @test_utils.require_version(minver='3.6') def test_as_string(self): code = textwrap.dedent(""" print() test: int = 5 test2: str test3: List[Dict[(str, str)]] = [] """) ast = abuilder.string_build(code) 
self.assertEqual(ast.as_string().strip(), code.strip()) class ArgumentsNodeTC(unittest.TestCase): def test_linenumbering(self): ast = builder.parse(''' def func(a, b): pass x = lambda x: None ''') self.assertEqual(ast['func'].args.fromlineno, 2) self.assertFalse(ast['func'].args.is_statement) xlambda = next(ast['x'].infer()) self.assertEqual(xlambda.args.fromlineno, 4) self.assertEqual(xlambda.args.tolineno, 4) self.assertFalse(xlambda.args.is_statement) if sys.version_info < (3, 0): self.assertEqual(ast['func'].args.tolineno, 3) else: self.skipTest('FIXME http://bugs.python.org/issue10445 ' '(no line number on function args)') @test_utils.require_version(minver='3.0') def test_kwoargs(self): ast = builder.parse(''' def func(*, x): pass ''') args = ast['func'].args self.assertTrue(args.is_argument('x')) class UnboundMethodNodeTest(unittest.TestCase): def test_no_super_getattr(self): # This is a test for issue # https://bitbucket.org/logilab/astroid/issue/91, which tests # that UnboundMethod doesn't call super when doing .getattr. ast = builder.parse(''' class A(object): def test(self): pass meth = A.test ''') node = next(ast['meth'].infer()) with self.assertRaises(exceptions.AttributeInferenceError): node.getattr('__missssing__') name = node.getattr('__name__')[0] self.assertIsInstance(name, nodes.Const) self.assertEqual(name.value, 'test') class BoundMethodNodeTest(unittest.TestCase): def test_is_property(self): ast = builder.parse(''' import abc def cached_property(): # Not a real decorator, but we don't care pass def reify(): # Same as cached_property pass def lazy_property(): pass def lazyproperty(): pass def lazy(): pass class A(object): @property def builtin_property(self): return 42 @abc.abstractproperty def abc_property(self): return 42 @cached_property def cached_property(self): return 42 @reify def reified(self): return 42 @lazy_property def lazy_prop(self): return 42 @lazyproperty def lazyprop(self): return 42 def not_prop(self): pass @lazy def decorated_with_lazy(self): return 42 cls = A() builtin_property = cls.builtin_property abc_property = cls.abc_property cached_p = cls.cached_property reified = cls.reified not_prop = cls.not_prop lazy_prop = cls.lazy_prop lazyprop = cls.lazyprop decorated_with_lazy = cls.decorated_with_lazy ''') for prop in ('builtin_property', 'abc_property', 'cached_p', 'reified', 'lazy_prop', 'lazyprop', 'decorated_with_lazy'): inferred = next(ast[prop].infer()) self.assertIsInstance(inferred, nodes.Const, prop) self.assertEqual(inferred.value, 42, prop) inferred = next(ast['not_prop'].infer()) self.assertIsInstance(inferred, bases.BoundMethod) class AliasesTest(unittest.TestCase): def setUp(self): self.transformer = transforms.TransformVisitor() def parse_transform(self, code): module = parse(code, apply_transforms=False) return self.transformer.visit(module) def test_aliases(self): def test_from(node): node.names = node.names + [('absolute_import', None)] return node def test_class(node): node.name = 'Bar' return node def test_function(node): node.name = 'another_test' return node def test_callfunc(node): if node.func.name == 'Foo': node.func.name = 'Bar' return node return None def test_assname(node): if node.name == 'foo': return nodes.AssignName('bar', node.lineno, node.col_offset, node.parent) return None def test_assattr(node): if node.attrname == 'a': node.attrname = 'b' return node return None def test_getattr(node): if node.attrname == 'a': node.attrname = 'b' return node return None def test_genexpr(node): if node.elt.value == 1: node.elt = 
nodes.Const(2, node.lineno, node.col_offset, node.parent) return node return None self.transformer.register_transform(nodes.ImportFrom, test_from) self.transformer.register_transform(nodes.ClassDef, test_class) self.transformer.register_transform(nodes.FunctionDef, test_function) self.transformer.register_transform(nodes.Call, test_callfunc) self.transformer.register_transform(nodes.AssignName, test_assname) self.transformer.register_transform(nodes.AssignAttr, test_assattr) self.transformer.register_transform(nodes.Attribute, test_getattr) self.transformer.register_transform(nodes.GeneratorExp, test_genexpr) string = ''' from __future__ import print_function class Foo: pass def test(a): return a foo = Foo() foo.a = test(42) foo.a (1 for _ in range(0, 42)) ''' module = self.parse_transform(string) self.assertEqual(len(module.body[0].names), 2) self.assertIsInstance(module.body[0], nodes.ImportFrom) self.assertEqual(module.body[1].name, 'Bar') self.assertIsInstance(module.body[1], nodes.ClassDef) self.assertEqual(module.body[2].name, 'another_test') self.assertIsInstance(module.body[2], nodes.FunctionDef) self.assertEqual(module.body[3].targets[0].name, 'bar') self.assertIsInstance(module.body[3].targets[0], nodes.AssignName) self.assertEqual(module.body[3].value.func.name, 'Bar') self.assertIsInstance(module.body[3].value, nodes.Call) self.assertEqual(module.body[4].targets[0].attrname, 'b') self.assertIsInstance(module.body[4].targets[0], nodes.AssignAttr) self.assertIsInstance(module.body[5], nodes.Expr) self.assertEqual(module.body[5].value.attrname, 'b') self.assertIsInstance(module.body[5].value, nodes.Attribute) self.assertEqual(module.body[6].value.elt.value, 2) self.assertIsInstance(module.body[6].value, nodes.GeneratorExp) @test_utils.require_version('3.5') class Python35AsyncTest(unittest.TestCase): def test_async_await_keywords(self): async_def, async_for, async_with, await_node = builder.extract_node(''' async def func(): #@ async for i in range(10): #@ f = __(await i) async with test(): #@ pass ''') self.assertIsInstance(async_def, nodes.AsyncFunctionDef) self.assertIsInstance(async_for, nodes.AsyncFor) self.assertIsInstance(async_with, nodes.AsyncWith) self.assertIsInstance(await_node, nodes.Await) self.assertIsInstance(await_node.value, nodes.Name) def _test_await_async_as_string(self, code): ast_node = parse(code) self.assertEqual(ast_node.as_string().strip(), code.strip()) def test_await_as_string(self): code = textwrap.dedent(''' async def function(): await 42 await x[0] (await x)[0] await (x + y)[0] ''') self._test_await_async_as_string(code) def test_asyncwith_as_string(self): code = textwrap.dedent(''' async def function(): async with 42: pass ''') self._test_await_async_as_string(code) def test_asyncfor_as_string(self): code = textwrap.dedent(''' async def function(): async for i in range(10): await 42 ''') self._test_await_async_as_string(code) class ContextTest(unittest.TestCase): def test_subscript_load(self): node = builder.extract_node('f[1]') self.assertIs(node.ctx, astroid.Load) def test_subscript_del(self): node = builder.extract_node('del f[1]') self.assertIs(node.targets[0].ctx, astroid.Del) def test_subscript_store(self): node = builder.extract_node('f[1] = 2') subscript = node.targets[0] self.assertIs(subscript.ctx, astroid.Store) def test_list_load(self): node = builder.extract_node('[]') self.assertIs(node.ctx, astroid.Load) def test_list_del(self): node = builder.extract_node('del []') self.assertIs(node.targets[0].ctx, astroid.Del) def 
test_list_store(self): with self.assertRaises(exceptions.AstroidSyntaxError): builder.extract_node('[0] = 2') def test_tuple_load(self): node = builder.extract_node('(1, )') self.assertIs(node.ctx, astroid.Load) def test_tuple_store(self): with self.assertRaises(exceptions.AstroidSyntaxError): builder.extract_node('(1, ) = 3') @test_utils.require_version(minver='3.5') def test_starred_load(self): node = builder.extract_node('a = *b') starred = node.value self.assertIs(starred.ctx, astroid.Load) @test_utils.require_version(minver='3.0') def test_starred_store(self): node = builder.extract_node('a, *b = 1, 2') starred = node.targets[0].elts[1] self.assertIs(starred.ctx, astroid.Store) def test_unknown(): """Test Unknown node""" assert isinstance(next(nodes.Unknown().infer()), type(util.Uninferable)) assert isinstance(nodes.Unknown().name, str) assert isinstance(nodes.Unknown().qname(), str) @pytest.mark.skipif(not HAS_TYPED_AST, reason="requires typed_ast") def test_type_comments_with(): module = builder.parse(''' with a as b: # type: int pass with a as b: # type: ignore pass ''') node = module.body[0] ignored_node = module.body[1] assert isinstance(node.type_annotation, astroid.Name) assert ignored_node.type_annotation is None @pytest.mark.skipif(not HAS_TYPED_AST, reason="requires typed_ast") def test_type_comments_for(): module = builder.parse(''' for a, b in [1, 2, 3]: # type: List[int] pass for a, b in [1, 2, 3]: # type: ignore pass ''') node = module.body[0] ignored_node = module.body[1] assert isinstance(node.type_annotation, astroid.Subscript) assert node.type_annotation.as_string() == 'List[int]' assert ignored_node.type_annotation is None @pytest.mark.skipif(not HAS_TYPED_AST, reason="requires typed_ast") def test_type_coments_assign(): module = builder.parse(''' a, b = [1, 2, 3] # type: List[int] a, b = [1, 2, 3] # type: ignore ''') node = module.body[0] ignored_node = module.body[1] assert isinstance(node.type_annotation, astroid.Subscript) assert node.type_annotation.as_string() == 'List[int]' assert ignored_node.type_annotation is None @pytest.mark.skipif(not HAS_TYPED_AST, reason="requires typed_ast") def test_type_comments_invalid_expression(): module = builder.parse(''' a, b = [1, 2, 3] # type: something completely invalid a, b = [1, 2, 3] # typeee: 2*+4 a, b = [1, 2, 3] # type: List[int ''') for node in module.body: assert node.type_annotation is None @pytest.mark.skipif(not HAS_TYPED_AST, reason="requires typed_ast") def test_type_comments_invalid_function_comments(): module = builder.parse(''' def func(): # type: something completely invalid pass def func1(): # typeee: 2*+4 pass def func2(): # type: List[int pass ''') for node in module.body: assert node.type_comment_returns is None assert node.type_comment_args is None @pytest.mark.skipif(not HAS_TYPED_AST, reason="requires typed_ast") def test_type_comments_function(): module = builder.parse(''' def func(): # type: (int) -> str pass def func1(): # type: (int, int, int) -> (str, str) pass def func2(): # type: (int, int, str, List[int]) -> List[int] pass ''') expected_annotations = [ (["int"], astroid.Name, "str"), (["int", "int", "int"], astroid.Tuple, "(str, str)"), (["int", "int", "str", "List[int]"], astroid.Subscript, "List[int]"), ] for node, ( expected_args, expected_returns_type, expected_returns_string ) in zip(module.body, expected_annotations): assert node.type_comment_returns is not None assert node.type_comment_args is not None for expected_arg, actual_arg in zip(expected_args, node.type_comment_args): assert 
actual_arg.as_string() == expected_arg assert isinstance(node.type_comment_returns, expected_returns_type) assert node.type_comment_returns.as_string() == expected_returns_string if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_object_model.py0000644000076500000240000004627413324063433023514 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Claudiu Popa # Copyright (c) 2016 Derek Gustafson # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import builtins import unittest import xml import astroid from astroid import builder from astroid import exceptions from astroid import MANAGER from astroid import test_utils from astroid import objects BUILTINS = MANAGER.astroid_cache[builtins.__name__] class InstanceModelTest(unittest.TestCase): def test_instance_special_model(self): ast_nodes = builder.extract_node(''' class A: "test" def __init__(self): self.a = 42 a = A() a.__class__ #@ a.__module__ #@ a.__doc__ #@ a.__dict__ #@ ''', module_name='fake_module') cls = next(ast_nodes[0].infer()) self.assertIsInstance(cls, astroid.ClassDef) self.assertEqual(cls.name, 'A') module = next(ast_nodes[1].infer()) self.assertIsInstance(module, astroid.Const) self.assertEqual(module.value, 'fake_module') doc = next(ast_nodes[2].infer()) self.assertIsInstance(doc, astroid.Const) self.assertEqual(doc.value, 'test') dunder_dict = next(ast_nodes[3].infer()) self.assertIsInstance(dunder_dict, astroid.Dict) attr = next(dunder_dict.getitem(astroid.Const('a')).infer()) self.assertIsInstance(attr, astroid.Const) self.assertEqual(attr.value, 42) @unittest.expectedFailure def test_instance_local_attributes_overrides_object_model(self): # The instance lookup needs to be changed in order for this to work. 
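# (A sketch of the intended behaviour once that lookup is fixed, assuming
# it mirrors CPython: a __dict__ property defined on the class shadows the
# synthetic instance model, so A().__dict__ evaluates to the property's
# return value (the empty list below) rather than the special instance
# __dict__.)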
ast_node = builder.extract_node(''' class A: @property def __dict__(self): return [] A().__dict__ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, astroid.List) self.assertEqual(inferred.elts, []) class BoundMethodModelTest(unittest.TestCase): def test_bound_method_model(self): ast_nodes = builder.extract_node(''' class A: def test(self): pass a = A() a.test.__func__ #@ a.test.__self__ #@ ''') func = next(ast_nodes[0].infer()) self.assertIsInstance(func, astroid.FunctionDef) self.assertEqual(func.name, 'test') self_ = next(ast_nodes[1].infer()) self.assertIsInstance(self_, astroid.Instance) self.assertEqual(self_.name, 'A') class UnboundMethodModelTest(unittest.TestCase): def test_unbound_method_model(self): ast_nodes = builder.extract_node(''' class A: def test(self): pass t = A.test t.__class__ #@ t.__func__ #@ t.__self__ #@ t.im_class #@ t.im_func #@ t.im_self #@ ''') cls = next(ast_nodes[0].infer()) self.assertIsInstance(cls, astroid.ClassDef) unbound_name = 'function' self.assertEqual(cls.name, unbound_name) func = next(ast_nodes[1].infer()) self.assertIsInstance(func, astroid.FunctionDef) self.assertEqual(func.name, 'test') self_ = next(ast_nodes[2].infer()) self.assertIsInstance(self_, astroid.Const) self.assertIsNone(self_.value) self.assertEqual(cls.name, next(ast_nodes[3].infer()).name) self.assertEqual(func, next(ast_nodes[4].infer())) self.assertIsNone(next(ast_nodes[5].infer()).value) class ClassModelTest(unittest.TestCase): def test_priority_to_local_defined_values(self): ast_node = builder.extract_node(''' class A: __doc__ = "first" A.__doc__ #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, astroid.Const) self.assertEqual(inferred.value, "first") @test_utils.require_version(maxver='3.0') def test__mro__old_style(self): ast_node = builder.extract_node(''' class A: pass A.__mro__ ''') with self.assertRaises(exceptions.InferenceError): next(ast_node.infer()) @test_utils.require_version(maxver='3.0') def test__subclasses__old_style(self): ast_node = builder.extract_node(''' class A: pass A.__subclasses__ ''') with self.assertRaises(exceptions.InferenceError): next(ast_node.infer()) def test_class_model_correct_mro_subclasses_proxied(self): ast_nodes = builder.extract_node(''' class A(object): pass A.mro #@ A.__subclasses__ #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertIsInstance(inferred, astroid.BoundMethod) self.assertIsInstance(inferred._proxied, astroid.FunctionDef) self.assertIsInstance(inferred.bound, astroid.ClassDef) self.assertEqual(inferred.bound.name, 'type') def test_class_model(self): ast_nodes = builder.extract_node(''' class A(object): "test" class B(A): pass class C(A): pass A.__module__ #@ A.__name__ #@ A.__qualname__ #@ A.__doc__ #@ A.__mro__ #@ A.mro() #@ A.__bases__ #@ A.__class__ #@ A.__dict__ #@ A.__subclasses__() #@ ''', module_name='fake_module') module = next(ast_nodes[0].infer()) self.assertIsInstance(module, astroid.Const) self.assertEqual(module.value, 'fake_module') name = next(ast_nodes[1].infer()) self.assertIsInstance(name, astroid.Const) self.assertEqual(name.value, 'A') qualname = next(ast_nodes[2].infer()) self.assertIsInstance(qualname, astroid.Const) self.assertEqual(qualname.value, 'fake_module.A') doc = next(ast_nodes[3].infer()) self.assertIsInstance(doc, astroid.Const) self.assertEqual(doc.value, 'test') mro = next(ast_nodes[4].infer()) self.assertIsInstance(mro, astroid.Tuple) self.assertEqual([cls.name for cls in mro.elts], ['A', 'object']) called_mro = 
next(ast_nodes[5].infer()) self.assertEqual(called_mro.elts, mro.elts) bases = next(ast_nodes[6].infer()) self.assertIsInstance(bases, astroid.Tuple) self.assertEqual([cls.name for cls in bases.elts], ['object']) cls = next(ast_nodes[7].infer()) self.assertIsInstance(cls, astroid.ClassDef) self.assertEqual(cls.name, 'type') cls_dict = next(ast_nodes[8].infer()) self.assertIsInstance(cls_dict, astroid.Dict) subclasses = next(ast_nodes[9].infer()) self.assertIsInstance(subclasses, astroid.List) self.assertEqual([cls.name for cls in subclasses.elts], ['B', 'C']) class ModuleModelTest(unittest.TestCase): def test_priority_to_local_defined_values(self): ast_node = astroid.parse(''' __file__ = "mine" ''') file_value = next(ast_node.igetattr('__file__')) self.assertIsInstance(file_value, astroid.Const) self.assertEqual(file_value.value, "mine") def test__path__not_a_package(self): ast_node = builder.extract_node(''' import sys sys.__path__ #@ ''') with self.assertRaises(exceptions.InferenceError): next(ast_node.infer()) def test_module_model(self): ast_nodes = builder.extract_node(''' import xml xml.__path__ #@ xml.__name__ #@ xml.__doc__ #@ xml.__file__ #@ xml.__spec__ #@ xml.__loader__ #@ xml.__cached__ #@ xml.__package__ #@ xml.__dict__ #@ ''') path = next(ast_nodes[0].infer()) self.assertIsInstance(path, astroid.List) self.assertIsInstance(path.elts[0], astroid.Const) self.assertEqual(path.elts[0].value, xml.__path__[0]) name = next(ast_nodes[1].infer()) self.assertIsInstance(name, astroid.Const) self.assertEqual(name.value, 'xml') doc = next(ast_nodes[2].infer()) self.assertIsInstance(doc, astroid.Const) self.assertEqual(doc.value, xml.__doc__) file_ = next(ast_nodes[3].infer()) self.assertIsInstance(file_, astroid.Const) self.assertEqual(file_.value, xml.__file__.replace(".pyc", ".py")) for ast_node in ast_nodes[4:7]: inferred = next(ast_node.infer()) self.assertIs(inferred, astroid.Uninferable) package = next(ast_nodes[7].infer()) self.assertIsInstance(package, astroid.Const) self.assertEqual(package.value, 'xml') dict_ = next(ast_nodes[8].infer()) self.assertIsInstance(dict_, astroid.Dict) class FunctionModelTest(unittest.TestCase): def test_partial_descriptor_support(self): bound, result = builder.extract_node(''' class A(object): pass def test(self): return 42 f = test.__get__(A(), A) f #@ f() #@ ''') bound = next(bound.infer()) self.assertIsInstance(bound, astroid.BoundMethod) self.assertEqual(bound._proxied._proxied.name, 'test') result = next(result.infer()) self.assertIsInstance(result, astroid.Const) self.assertEqual(result.value, 42) def test___get__has_extra_params_defined(self): node = builder.extract_node(''' def test(self): return 42 test.__get__ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, astroid.BoundMethod) args = inferred.args.args self.assertEqual(len(args), 2) self.assertEqual([arg.name for arg in args], ['self', 'type']) @unittest.expectedFailure def test_descriptor_not_inferrring_self(self): # We can't infer __get__(X, Y)() when the bounded function # uses self, because of the tree's parent not being propagating good enough. 
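# (For reference, a sketch of the runtime behaviour being modelled,
# assuming plain CPython semantics rather than anything astroid-specific:
#     class A(object): x = 42
#     def test(self): return self.x
#     test.__get__(A(), A)()   # returns 42 when actually executed
# astroid cannot yet follow `self.x` through the manually bound
# descriptor, hence the expectedFailure below.)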
result = builder.extract_node(''' class A(object): x = 42 def test(self): return self.x f = test.__get__(A(), A) f() #@ ''') result = next(result.infer()) self.assertIsInstance(result, astroid.Const) self.assertEqual(result.value, 42) def test_descriptors_binding_invalid(self): ast_nodes = builder.extract_node(''' class A: pass def test(self): return 42 test.__get__()() #@ test.__get__(1)() #@ test.__get__(2, 3, 4) #@ ''') for node in ast_nodes: with self.assertRaises(exceptions.InferenceError): next(node.infer()) def test_descriptor_error_regression(self): """Make sure the following code does node cause an exception""" node = builder.extract_node(''' class MyClass: text = "MyText" def mymethod1(self): return self.text def mymethod2(self): return self.mymethod1.__get__(self, MyClass) cl = MyClass().mymethod2()() cl #@ ''') [const] = node.inferred() assert const.value == "MyText" def test_function_model(self): ast_nodes = builder.extract_node(''' def func(a=1, b=2): """test""" func.__name__ #@ func.__doc__ #@ func.__qualname__ #@ func.__module__ #@ func.__defaults__ #@ func.__dict__ #@ func.__globals__ #@ func.__code__ #@ func.__closure__ #@ ''', module_name='fake_module') name = next(ast_nodes[0].infer()) self.assertIsInstance(name, astroid.Const) self.assertEqual(name.value, 'func') doc = next(ast_nodes[1].infer()) self.assertIsInstance(doc, astroid.Const) self.assertEqual(doc.value, 'test') qualname = next(ast_nodes[2].infer()) self.assertIsInstance(qualname, astroid.Const) self.assertEqual(qualname.value, 'fake_module.func') module = next(ast_nodes[3].infer()) self.assertIsInstance(module, astroid.Const) self.assertEqual(module.value, 'fake_module') defaults = next(ast_nodes[4].infer()) self.assertIsInstance(defaults, astroid.Tuple) self.assertEqual([default.value for default in defaults.elts], [1, 2]) dict_ = next(ast_nodes[5].infer()) self.assertIsInstance(dict_, astroid.Dict) globals_ = next(ast_nodes[6].infer()) self.assertIsInstance(globals_, astroid.Dict) for ast_node in ast_nodes[7:9]: self.assertIs(next(ast_node.infer()), astroid.Uninferable) @test_utils.require_version(minver='3.0') def test_empty_return_annotation(self): ast_node = builder.extract_node(''' def test(): pass test.__annotations__ ''') annotations = next(ast_node.infer()) self.assertIsInstance(annotations, astroid.Dict) self.assertEqual(len(annotations.items), 0) @test_utils.require_version(minver='3.0') def test_builtin_dunder_init_does_not_crash_when_accessing_annotations(self): ast_node = builder.extract_node(''' class Class: @classmethod def class_method(cls): cls.__init__.__annotations__ #@ ''') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, astroid.Dict) self.assertEqual(len(inferred.items), 0) @test_utils.require_version(minver='3.0') def test_annotations_kwdefaults(self): ast_node = builder.extract_node(''' def test(a: 1, *args: 2, f:4='lala', **kwarg:3)->2: pass test.__annotations__ #@ test.__kwdefaults__ #@ ''') annotations = next(ast_node[0].infer()) self.assertIsInstance(annotations, astroid.Dict) self.assertIsInstance(annotations.getitem(astroid.Const('return')), astroid.Const) self.assertEqual(annotations.getitem(astroid.Const('return')).value, 2) self.assertIsInstance(annotations.getitem(astroid.Const('a')), astroid.Const) self.assertEqual(annotations.getitem(astroid.Const('a')).value, 1) self.assertEqual(annotations.getitem(astroid.Const('args')).value, 2) self.assertEqual(annotations.getitem(astroid.Const('kwarg')).value, 3) 
self.assertEqual(annotations.getitem(astroid.Const('f')).value, 4) kwdefaults = next(ast_node[1].infer()) self.assertIsInstance(kwdefaults, astroid.Dict) # self.assertEqual(kwdefaults.getitem('f').value, 'lala') @test_utils.require_version(maxver='3.0') def test_function_model_for_python2(self): ast_nodes = builder.extract_node(''' def test(a=1): "a" test.func_name #@ test.func_doc #@ test.func_dict #@ test.func_globals #@ test.func_defaults #@ test.func_code #@ test.func_closure #@ ''') name = next(ast_nodes[0].infer()) self.assertIsInstance(name, astroid.Const) self.assertEqual(name.value, 'test') doc = next(ast_nodes[1].infer()) self.assertIsInstance(doc, astroid.Const) self.assertEqual(doc.value, 'a') pydict = next(ast_nodes[2].infer()) self.assertIsInstance(pydict, astroid.Dict) pyglobals = next(ast_nodes[3].infer()) self.assertIsInstance(pyglobals, astroid.Dict) defaults = next(ast_nodes[4].infer()) self.assertIsInstance(defaults, astroid.Tuple) for node in ast_nodes[5:]: self.assertIs(next(node.infer()), astroid.Uninferable) class GeneratorModelTest(unittest.TestCase): def test_model(self): ast_nodes = builder.extract_node(''' def test(): "a" yield gen = test() gen.__name__ #@ gen.__doc__ #@ gen.gi_code #@ gen.gi_frame #@ gen.send #@ ''') name = next(ast_nodes[0].infer()) self.assertEqual(name.value, 'test') doc = next(ast_nodes[1].infer()) self.assertEqual(doc.value, 'a') gi_code = next(ast_nodes[2].infer()) self.assertIsInstance(gi_code, astroid.ClassDef) self.assertEqual(gi_code.name, 'gi_code') gi_frame = next(ast_nodes[3].infer()) self.assertIsInstance(gi_frame, astroid.ClassDef) self.assertEqual(gi_frame.name, 'gi_frame') send = next(ast_nodes[4].infer()) self.assertIsInstance(send, astroid.BoundMethod) class ExceptionModelTest(unittest.TestCase): def test_model_py3(self): ast_nodes = builder.extract_node(''' try: x[42] except ValueError as err: err.args #@ err.__traceback__ #@ err.message #@ ''') args = next(ast_nodes[0].infer()) self.assertIsInstance(args, astroid.Tuple) tb = next(ast_nodes[1].infer()) self.assertIsInstance(tb, astroid.Instance) self.assertEqual(tb.name, 'traceback') with self.assertRaises(exceptions.InferenceError): next(ast_nodes[2].infer()) class DictObjectModelTest(unittest.TestCase): def test__class__(self): ast_node = builder.extract_node('{}.__class__') inferred = next(ast_node.infer()) self.assertIsInstance(inferred, astroid.ClassDef) self.assertEqual(inferred.name, 'dict') def test_attributes_inferred_as_methods(self): ast_nodes = builder.extract_node(''' {}.values #@ {}.items #@ {}.keys #@ ''') for node in ast_nodes: inferred = next(node.infer()) self.assertIsInstance(inferred, astroid.BoundMethod) def test_wrapper_objects_for_dict_methods_python3(self): ast_nodes = builder.extract_node(''' {1:1, 2:3}.values() #@ {1:1, 2:3}.keys() #@ {1:1, 2:3}.items() #@ ''') values = next(ast_nodes[0].infer()) self.assertIsInstance(values, objects.DictValues) self.assertEqual([elt.value for elt in values.elts], [1, 3]) keys = next(ast_nodes[1].infer()) self.assertIsInstance(keys, objects.DictKeys) self.assertEqual([elt.value for elt in keys.elts], [1, 2]) items = next(ast_nodes[2].infer()) self.assertIsInstance(items, objects.DictItems) class LruCacheModelTest(unittest.TestCase): def test_lru_cache(self): ast_nodes = builder.extract_node(''' import functools class Foo(object): @functools.lru_cache() def foo(): pass f = Foo() f.foo.cache_clear #@ f.foo.__wrapped__ #@ f.foo.cache_info() #@ ''') cache_clear = next(ast_nodes[0].infer()) 
self.assertIsInstance(cache_clear, astroid.BoundMethod) wrapped = next(ast_nodes[1].infer()) self.assertIsInstance(wrapped, astroid.FunctionDef) self.assertEqual(wrapped.name, 'foo') cache_info = next(ast_nodes[2].infer()) self.assertIsInstance(cache_info, astroid.Instance) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_objects.py0000644000076500000240000004634113324063433022512 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import unittest from astroid import bases from astroid import builder from astroid import exceptions from astroid import nodes from astroid import objects from astroid import test_utils class ObjectsTest(unittest.TestCase): def test_frozenset(self): node = builder.extract_node(""" frozenset({1: 2, 2: 3}) #@ """) inferred = next(node.infer()) self.assertIsInstance(inferred, objects.FrozenSet) self.assertEqual(inferred.pytype(), "%s.frozenset" % bases.BUILTINS) itered = inferred.itered() self.assertEqual(len(itered), 2) self.assertIsInstance(itered[0], nodes.Const) self.assertEqual([const.value for const in itered], [1, 2]) proxied = inferred._proxied self.assertEqual(inferred.qname(), "%s.frozenset" % bases.BUILTINS) self.assertIsInstance(proxied, nodes.ClassDef) class SuperTests(unittest.TestCase): def test_inferring_super_outside_methods(self): ast_nodes = builder.extract_node(''' class Module(object): pass class StaticMethod(object): @staticmethod def static(): # valid, but we don't bother with it. return super(StaticMethod, StaticMethod) #@ # super outside methods aren't inferred super(Module, Module) #@ # no argument super is not recognised outside methods as well. super() #@ ''') in_static = next(ast_nodes[0].value.infer()) self.assertIsInstance(in_static, bases.Instance) self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) module_level = next(ast_nodes[1].infer()) self.assertIsInstance(module_level, bases.Instance) self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) no_arguments = next(ast_nodes[2].infer()) self.assertIsInstance(no_arguments, bases.Instance) self.assertEqual(no_arguments.qname(), "%s.super" % bases.BUILTINS) def test_inferring_unbound_super_doesnt_work(self): node = builder.extract_node(''' class Test(object): def __init__(self): super(Test) #@ ''') unbounded = next(node.infer()) self.assertIsInstance(unbounded, bases.Instance) self.assertEqual(unbounded.qname(), "%s.super" % bases.BUILTINS) def test_use_default_inference_on_not_inferring_args(self): ast_nodes = builder.extract_node(''' class Test(object): def __init__(self): super(Lala, self) #@ super(Test, lala) #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, bases.Instance) self.assertEqual(first.qname(), "%s.super" % bases.BUILTINS) second = next(ast_nodes[1].infer()) self.assertIsInstance(second, bases.Instance) self.assertEqual(second.qname(), "%s.super" % bases.BUILTINS) @test_utils.require_version(maxver='3.0') def test_super_on_old_style_class(self): # super doesn't work on old style class, but leave # that as an error for pylint. We'll infer Super objects, # but every call will result in a failure at some point. 
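# (For reference, assuming Python 2 semantics: super(OldStyle, self)
# raises TypeError at runtime because OldStyle is not a new-style class;
# astroid mirrors this by making super_mro() raise SuperError with
# "Unable to call super on old-style classes.", as asserted below.)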
node = builder.extract_node(''' class OldStyle: def __init__(self): super(OldStyle, self) #@ ''') old = next(node.infer()) self.assertIsInstance(old, objects.Super) self.assertIsInstance(old.mro_pointer, nodes.ClassDef) self.assertEqual(old.mro_pointer.name, 'OldStyle') with self.assertRaises(exceptions.SuperError) as cm: old.super_mro() self.assertEqual(str(cm.exception), "Unable to call super on old-style classes.") @test_utils.require_version(minver='3.0') def test_no_arguments_super(self): ast_nodes = builder.extract_node(''' class First(object): pass class Second(First): def test(self): super() #@ @classmethod def test_classmethod(cls): super() #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, objects.Super) self.assertIsInstance(first.type, bases.Instance) self.assertEqual(first.type.name, 'Second') self.assertIsInstance(first.mro_pointer, nodes.ClassDef) self.assertEqual(first.mro_pointer.name, 'Second') second = next(ast_nodes[1].infer()) self.assertIsInstance(second, objects.Super) self.assertIsInstance(second.type, nodes.ClassDef) self.assertEqual(second.type.name, 'Second') self.assertIsInstance(second.mro_pointer, nodes.ClassDef) self.assertEqual(second.mro_pointer.name, 'Second') def test_super_simple_cases(self): ast_nodes = builder.extract_node(''' class First(object): pass class Second(First): pass class Third(First): def test(self): super(Third, self) #@ super(Second, self) #@ # mro position and the type super(Third, Third) #@ super(Third, Second) #@ super(Fourth, Fourth) #@ class Fourth(Third): pass ''') # .type is the object which provides the mro. # .mro_pointer is the position in the mro from where # the lookup should be done. # super(Third, self) first = next(ast_nodes[0].infer()) self.assertIsInstance(first, objects.Super) self.assertIsInstance(first.type, bases.Instance) self.assertEqual(first.type.name, 'Third') self.assertIsInstance(first.mro_pointer, nodes.ClassDef) self.assertEqual(first.mro_pointer.name, 'Third') # super(Second, self) second = next(ast_nodes[1].infer()) self.assertIsInstance(second, objects.Super) self.assertIsInstance(second.type, bases.Instance) self.assertEqual(second.type.name, 'Third') self.assertIsInstance(first.mro_pointer, nodes.ClassDef) self.assertEqual(second.mro_pointer.name, 'Second') # super(Third, Third) third = next(ast_nodes[2].infer()) self.assertIsInstance(third, objects.Super) self.assertIsInstance(third.type, nodes.ClassDef) self.assertEqual(third.type.name, 'Third') self.assertIsInstance(third.mro_pointer, nodes.ClassDef) self.assertEqual(third.mro_pointer.name, 'Third') # super(Third, second) fourth = next(ast_nodes[3].infer()) self.assertIsInstance(fourth, objects.Super) self.assertIsInstance(fourth.type, nodes.ClassDef) self.assertEqual(fourth.type.name, 'Second') self.assertIsInstance(fourth.mro_pointer, nodes.ClassDef) self.assertEqual(fourth.mro_pointer.name, 'Third') # Super(Fourth, Fourth) fifth = next(ast_nodes[4].infer()) self.assertIsInstance(fifth, objects.Super) self.assertIsInstance(fifth.type, nodes.ClassDef) self.assertEqual(fifth.type.name, 'Fourth') self.assertIsInstance(fifth.mro_pointer, nodes.ClassDef) self.assertEqual(fifth.mro_pointer.name, 'Fourth') def test_super_infer(self): node = builder.extract_node(''' class Super(object): def __init__(self): super(Super, self) #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, objects.Super) reinferred = next(inferred.infer()) self.assertIsInstance(reinferred, objects.Super) self.assertIs(inferred, reinferred) def 
test_inferring_invalid_supers(self): ast_nodes = builder.extract_node(''' class Super(object): def __init__(self): # MRO pointer is not a type super(1, self) #@ # MRO type is not a subtype super(Super, 1) #@ # self is not a subtype of Bupper super(Bupper, self) #@ class Bupper(Super): pass ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, objects.Super) with self.assertRaises(exceptions.SuperError) as cm: first.super_mro() self.assertIsInstance(cm.exception.super_.mro_pointer, nodes.Const) self.assertEqual(cm.exception.super_.mro_pointer.value, 1) for node, invalid_type in zip(ast_nodes[1:], (nodes.Const, bases.Instance)): inferred = next(node.infer()) self.assertIsInstance(inferred, objects.Super, node) with self.assertRaises(exceptions.SuperError) as cm: inferred.super_mro() self.assertIsInstance(cm.exception.super_.type, invalid_type) def test_proxied(self): node = builder.extract_node(''' class Super(object): def __init__(self): super(Super, self) #@ ''') inferred = next(node.infer()) proxied = inferred._proxied self.assertEqual(proxied.qname(), "%s.super" % bases.BUILTINS) self.assertIsInstance(proxied, nodes.ClassDef) def test_super_bound_model(self): ast_nodes = builder.extract_node(''' class First(object): def method(self): pass @classmethod def class_method(cls): pass class Super_Type_Type(First): def method(self): super(Super_Type_Type, Super_Type_Type).method #@ super(Super_Type_Type, Super_Type_Type).class_method #@ @classmethod def class_method(cls): super(Super_Type_Type, Super_Type_Type).method #@ super(Super_Type_Type, Super_Type_Type).class_method #@ class Super_Type_Object(First): def method(self): super(Super_Type_Object, self).method #@ super(Super_Type_Object, self).class_method #@ ''') # Super(type, type) is the same for both functions and classmethods. first = next(ast_nodes[0].infer()) self.assertIsInstance(first, nodes.FunctionDef) self.assertEqual(first.name, 'method') second = next(ast_nodes[1].infer()) self.assertIsInstance(second, bases.BoundMethod) self.assertEqual(second.bound.name, 'First') self.assertEqual(second.type, 'classmethod') third = next(ast_nodes[2].infer()) self.assertIsInstance(third, nodes.FunctionDef) self.assertEqual(third.name, 'method') fourth = next(ast_nodes[3].infer()) self.assertIsInstance(fourth, bases.BoundMethod) self.assertEqual(fourth.bound.name, 'First') self.assertEqual(fourth.type, 'classmethod') # Super(type, obj) can lead to different attribute bindings # depending on the type of the place where super was called. fifth = next(ast_nodes[4].infer()) self.assertIsInstance(fifth, bases.BoundMethod) self.assertEqual(fifth.bound.name, 'First') self.assertEqual(fifth.type, 'method') sixth = next(ast_nodes[5].infer()) self.assertIsInstance(sixth, bases.BoundMethod) self.assertEqual(sixth.bound.name, 'First') self.assertEqual(sixth.type, 'classmethod') def test_super_getattr_single_inheritance(self): ast_nodes = builder.extract_node(''' class First(object): def test(self): pass class Second(First): def test2(self): pass class Third(Second): test3 = 42 def __init__(self): super(Third, self).test2 #@ super(Third, self).test #@ # test3 is local, no MRO lookup is done. super(Third, self).test3 #@ super(Third, self) #@ # Unbounds. 
super(Third, Third).test2 #@ super(Third, Third).test #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, bases.BoundMethod) self.assertEqual(first.bound.name, 'Second') second = next(ast_nodes[1].infer()) self.assertIsInstance(second, bases.BoundMethod) self.assertEqual(second.bound.name, 'First') with self.assertRaises(exceptions.InferenceError): next(ast_nodes[2].infer()) fourth = next(ast_nodes[3].infer()) with self.assertRaises(exceptions.AttributeInferenceError): fourth.getattr('test3') with self.assertRaises(exceptions.AttributeInferenceError): next(fourth.igetattr('test3')) first_unbound = next(ast_nodes[4].infer()) self.assertIsInstance(first_unbound, nodes.FunctionDef) self.assertEqual(first_unbound.name, 'test2') self.assertEqual(first_unbound.parent.name, 'Second') second_unbound = next(ast_nodes[5].infer()) self.assertIsInstance(second_unbound, nodes.FunctionDef) self.assertEqual(second_unbound.name, 'test') self.assertEqual(second_unbound.parent.name, 'First') def test_super_invalid_mro(self): node = builder.extract_node(''' class A(object): test = 42 class Super(A, A): def __init__(self): super(Super, self) #@ ''') inferred = next(node.infer()) with self.assertRaises(exceptions.AttributeInferenceError): next(inferred.getattr('test')) def test_super_complex_mro(self): ast_nodes = builder.extract_node(''' class A(object): def spam(self): return "A" def foo(self): return "A" @staticmethod def static(self): pass class B(A): def boo(self): return "B" def spam(self): return "B" class C(A): def boo(self): return "C" class E(C, B): def __init__(self): super(E, self).boo #@ super(C, self).boo #@ super(E, self).spam #@ super(E, self).foo #@ super(E, self).static #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, bases.BoundMethod) self.assertEqual(first.bound.name, 'C') second = next(ast_nodes[1].infer()) self.assertIsInstance(second, bases.BoundMethod) self.assertEqual(second.bound.name, 'B') third = next(ast_nodes[2].infer()) self.assertIsInstance(third, bases.BoundMethod) self.assertEqual(third.bound.name, 'B') fourth = next(ast_nodes[3].infer()) self.assertEqual(fourth.bound.name, 'A') static = next(ast_nodes[4].infer()) self.assertIsInstance(static, nodes.FunctionDef) self.assertEqual(static.parent.scope().name, 'A') def test_super_data_model(self): ast_nodes = builder.extract_node(''' class X(object): pass class A(X): def __init__(self): super(A, self) #@ super(A, A) #@ super(X, A) #@ ''') first = next(ast_nodes[0].infer()) thisclass = first.getattr('__thisclass__')[0] self.assertIsInstance(thisclass, nodes.ClassDef) self.assertEqual(thisclass.name, 'A') selfclass = first.getattr('__self_class__')[0] self.assertIsInstance(selfclass, nodes.ClassDef) self.assertEqual(selfclass.name, 'A') self_ = first.getattr('__self__')[0] self.assertIsInstance(self_, bases.Instance) self.assertEqual(self_.name, 'A') cls = first.getattr('__class__')[0] self.assertEqual(cls, first._proxied) second = next(ast_nodes[1].infer()) thisclass = second.getattr('__thisclass__')[0] self.assertEqual(thisclass.name, 'A') self_ = second.getattr('__self__')[0] self.assertIsInstance(self_, nodes.ClassDef) self.assertEqual(self_.name, 'A') third = next(ast_nodes[2].infer()) thisclass = third.getattr('__thisclass__')[0] self.assertEqual(thisclass.name, 'X') selfclass = third.getattr('__self_class__')[0] self.assertEqual(selfclass.name, 'A') def assertEqualMro(self, klass, expected_mro): self.assertEqual( [member.name for member in klass.super_mro()], expected_mro) def 
test_super_mro(self): ast_nodes = builder.extract_node(''' class A(object): pass class B(A): pass class C(A): pass class E(C, B): def __init__(self): super(E, self) #@ super(C, self) #@ super(B, self) #@ super(B, 1) #@ super(1, B) #@ ''') first = next(ast_nodes[0].infer()) self.assertEqualMro(first, ['C', 'B', 'A', 'object']) second = next(ast_nodes[1].infer()) self.assertEqualMro(second, ['B', 'A', 'object']) third = next(ast_nodes[2].infer()) self.assertEqualMro(third, ['A', 'object']) fourth = next(ast_nodes[3].infer()) with self.assertRaises(exceptions.SuperError): fourth.super_mro() fifth = next(ast_nodes[4].infer()) with self.assertRaises(exceptions.SuperError): fifth.super_mro() def test_super_yes_objects(self): ast_nodes = builder.extract_node(''' from collections import Missing class A(object): def __init__(self): super(Missing, self) #@ super(A, Missing) #@ ''') first = next(ast_nodes[0].infer()) self.assertIsInstance(first, bases.Instance) second = next(ast_nodes[1].infer()) self.assertIsInstance(second, bases.Instance) def test_super_invalid_types(self): node = builder.extract_node(''' import collections class A(object): def __init__(self): super(A, collections) #@ ''') inferred = next(node.infer()) with self.assertRaises(exceptions.SuperError): inferred.super_mro() with self.assertRaises(exceptions.SuperError): inferred.super_mro() def test_super_properties(self): node = builder.extract_node(''' class Foo(object): @property def dict(self): return 42 class Bar(Foo): @property def dict(self): return super(Bar, self).dict Bar().dict ''') inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) def test_super_qname(self): """Make sure a Super object generates a qname equivalent to super.__qname__ """ # See issue 533 code = """ class C: def foo(self): return super() C().foo() #@ """ super_obj = next(builder.extract_node(code).infer()) self.assertEqual(super_obj.qname(), "super") if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_protocols.py0000644000076500000240000001714013324063433023100 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2018 Nick Drozd # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import contextlib import unittest import astroid from astroid import extract_node from astroid.test_utils import require_version from astroid import InferenceError from astroid import nodes from astroid import util from astroid.node_classes import AssignName, Const, Name, Starred @contextlib.contextmanager def _add_transform(manager, node, transform, predicate=None): manager.register_transform(node, transform, predicate) try: yield finally: manager.unregister_transform(node, transform, predicate) class ProtocolTests(unittest.TestCase): def assertConstNodesEqual(self, nodes_list_expected, nodes_list_got): self.assertEqual(len(nodes_list_expected), len(nodes_list_got)) for node in nodes_list_got: self.assertIsInstance(node, Const) for node, expected_value in zip(nodes_list_got, nodes_list_expected): self.assertEqual(expected_value, node.value) def assertNameNodesEqual(self, nodes_list_expected, nodes_list_got): self.assertEqual(len(nodes_list_expected), len(nodes_list_got)) for node in nodes_list_got: self.assertIsInstance(node, 
Name) for node, expected_name in zip(nodes_list_got, nodes_list_expected): self.assertEqual(expected_name, node.name) def test_assigned_stmts_simple_for(self): assign_stmts = extract_node(""" for a in (1, 2, 3): #@ pass for b in range(3): #@ pass """) for1_assnode = next(assign_stmts[0].nodes_of_class(AssignName)) assigned = list(for1_assnode.assigned_stmts()) self.assertConstNodesEqual([1, 2, 3], assigned) for2_assnode = next(assign_stmts[1].nodes_of_class(AssignName)) self.assertRaises(InferenceError, list, for2_assnode.assigned_stmts()) @require_version(minver='3.0') def test_assigned_stmts_starred_for(self): assign_stmts = extract_node(""" for *a, b in ((1, 2, 3), (4, 5, 6, 7)): #@ pass """) for1_starred = next(assign_stmts.nodes_of_class(Starred)) assigned = next(for1_starred.assigned_stmts()) assert isinstance(assigned, astroid.List) assert assigned.as_string() == '[1, 2]' def _get_starred_stmts(self, code): assign_stmt = extract_node("{} #@".format(code)) starred = next(assign_stmt.nodes_of_class(Starred)) return next(starred.assigned_stmts()) def _helper_starred_expected_const(self, code, expected): stmts = self._get_starred_stmts(code) self.assertIsInstance(stmts, nodes.List) stmts = stmts.elts self.assertConstNodesEqual(expected, stmts) def _helper_starred_expected(self, code, expected): stmts = self._get_starred_stmts(code) self.assertEqual(expected, stmts) def _helper_starred_inference_error(self, code): assign_stmt = extract_node("{} #@".format(code)) starred = next(assign_stmt.nodes_of_class(Starred)) self.assertRaises(InferenceError, list, starred.assigned_stmts()) @require_version(minver='3.0') def test_assigned_stmts_starred_assnames(self): self._helper_starred_expected_const( "a, *b = (1, 2, 3, 4) #@", [2, 3, 4]) self._helper_starred_expected_const( "*a, b = (1, 2, 3) #@", [1, 2]) self._helper_starred_expected_const( "a, *b, c = (1, 2, 3, 4, 5) #@", [2, 3, 4]) self._helper_starred_expected_const( "a, *b = (1, 2) #@", [2]) self._helper_starred_expected_const( "*b, a = (1, 2) #@", [1]) self._helper_starred_expected_const( "[*b] = (1, 2) #@", [1, 2]) @require_version(minver='3.0') def test_assigned_stmts_starred_yes(self): # Not something iterable and known self._helper_starred_expected("a, *b = range(3) #@", util.Uninferable) # Not something inferrable self._helper_starred_expected("a, *b = balou() #@", util.Uninferable) # In function, unknown. self._helper_starred_expected(""" def test(arg): head, *tail = arg #@""", util.Uninferable) # These cases aren't worth supporting. self._helper_starred_expected( "a, (*b, c), d = (1, (2, 3, 4), 5) #@", util.Uninferable) @require_version(minver='3.0') def test_assign_stmts_starred_fails(self): # Too many starred self._helper_starred_inference_error("a, *b, *c = (1, 2, 3) #@") # This could be solved properly, but it complicates needlessly the # code for assigned_stmts, without offering real benefit. 
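# (For reference: plain Python does unpack the nested targets used below,
# e.g.
#     (*a, b), (c, *d) = (1, 2, 3), (4, 5, 6)
#     # a == [1, 2], b == 3, c == 4, d == [5, 6]
# astroid simply reports an InferenceError for such patterns instead.)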
self._helper_starred_inference_error( "(*a, b), (c, *d) = (1, 2, 3), (4, 5, 6) #@") def test_assigned_stmts_assignments(self): assign_stmts = extract_node(""" c = a #@ d, e = b, c #@ """) simple_assnode = next(assign_stmts[0].nodes_of_class(AssignName)) assigned = list(simple_assnode.assigned_stmts()) self.assertNameNodesEqual(['a'], assigned) assnames = assign_stmts[1].nodes_of_class(AssignName) simple_mul_assnode_1 = next(assnames) assigned = list(simple_mul_assnode_1.assigned_stmts()) self.assertNameNodesEqual(['b'], assigned) simple_mul_assnode_2 = next(assnames) assigned = list(simple_mul_assnode_2.assigned_stmts()) self.assertNameNodesEqual(['c'], assigned) @require_version(minver='3.6') def test_assigned_stmts_annassignments(self): annassign_stmts = extract_node(""" a: str = "abc" #@ b: str #@ """) simple_annassign_node = next(annassign_stmts[0].nodes_of_class(AssignName)) assigned = list(simple_annassign_node.assigned_stmts()) self.assertEqual(1, len(assigned)) self.assertIsInstance(assigned[0], Const) self.assertEqual(assigned[0].value, "abc") empty_annassign_node = next(annassign_stmts[1].nodes_of_class(AssignName)) assigned = list(empty_annassign_node.assigned_stmts()) self.assertEqual(1, len(assigned)) self.assertIs(assigned[0], util.Uninferable) def test_sequence_assigned_stmts_not_accepting_empty_node(self): def transform(node): node.root().locals['__all__'] = [node.value] manager = astroid.MANAGER with _add_transform(manager, astroid.Assign, transform): module = astroid.parse(''' __all__ = ['a'] ''') module.wildcard_import_names() def test_not_passing_uninferable_in_seq_inference(self): class Visitor: def visit(self, node): for child in node.get_children(): child.accept(self) visit_module = visit visit_assign = visit visit_binop = visit visit_list = visit visit_const = visit visit_name = visit def visit_assignname(self, node): for _ in node.infer(): pass parsed = extract_node(""" a = [] x = [a*2, a]*2*2 """) parsed.accept(Visitor()) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_python3.py0000644000076500000240000003307613324063433022466 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2010, 2013-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2012 FELD Boris # Copyright (c) 2013-2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. 
# Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Jared Garst # Copyright (c) 2017 Hugo # Copyright (c) 2017 Łukasz Rogalski # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER from textwrap import dedent import unittest from astroid import nodes from astroid.node_classes import Assign, Expr, YieldFrom, Name, Const from astroid.builder import AstroidBuilder, extract_node from astroid.scoped_nodes import ClassDef, FunctionDef from astroid.test_utils import require_version class Python3TC(unittest.TestCase): @classmethod def setUpClass(cls): cls.builder = AstroidBuilder() @require_version('3.4') def test_starred_notation(self): astroid = self.builder.string_build("*a, b = [1, 2, 3]", 'test', 'test') # Get the star node node = next(next(next(astroid.get_children()).get_children()).get_children()) self.assertTrue(isinstance(node.assign_type(), Assign)) @require_version('3.4') def test_yield_from(self): body = dedent(""" def func(): yield from iter([1, 2]) """) astroid = self.builder.string_build(body) func = astroid.body[0] self.assertIsInstance(func, FunctionDef) yieldfrom_stmt = func.body[0] self.assertIsInstance(yieldfrom_stmt, Expr) self.assertIsInstance(yieldfrom_stmt.value, YieldFrom) self.assertEqual(yieldfrom_stmt.as_string(), 'yield from iter([1, 2])') @require_version('3.4') def test_yield_from_is_generator(self): body = dedent(""" def func(): yield from iter([1, 2]) """) astroid = self.builder.string_build(body) func = astroid.body[0] self.assertIsInstance(func, FunctionDef) self.assertTrue(func.is_generator()) @require_version('3.4') def test_yield_from_as_string(self): body = dedent(""" def func(): yield from iter([1, 2]) value = yield from other() """) astroid = self.builder.string_build(body) func = astroid.body[0] self.assertEqual(func.as_string().strip(), body.strip()) # metaclass tests @require_version('3.4') def test_simple_metaclass(self): astroid = self.builder.string_build("class Test(metaclass=type): pass") klass = astroid.body[0] metaclass = klass.metaclass() self.assertIsInstance(metaclass, ClassDef) self.assertEqual(metaclass.name, 'type') @require_version('3.4') def test_metaclass_error(self): astroid = self.builder.string_build("class Test(metaclass=typ): pass") klass = astroid.body[0] self.assertFalse(klass.metaclass()) @require_version('3.4') def test_metaclass_imported(self): astroid = self.builder.string_build(dedent(""" from abc import ABCMeta class Test(metaclass=ABCMeta): pass""")) klass = astroid.body[1] metaclass = klass.metaclass() self.assertIsInstance(metaclass, ClassDef) self.assertEqual(metaclass.name, 'ABCMeta') @require_version('3.4') def test_metaclass_multiple_keywords(self): astroid = self.builder.string_build("class Test(magic=None, metaclass=type): pass") klass = astroid.body[0] metaclass = klass.metaclass() self.assertIsInstance(metaclass, ClassDef) self.assertEqual(metaclass.name, 'type') @require_version('3.4') def test_as_string(self): body = dedent(""" from abc import ABCMeta class Test(metaclass=ABCMeta): pass""") astroid = self.builder.string_build(body) klass = astroid.body[1] self.assertEqual(klass.as_string(), '\n\nclass Test(metaclass=ABCMeta):\n pass\n') @require_version('3.4') def test_old_syntax_works(self): astroid = self.builder.string_build(dedent(""" class Test: __metaclass__ = type class SubTest(Test): pass """)) klass = astroid['SubTest'] metaclass = klass.metaclass() self.assertIsNone(metaclass) 
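# Illustrative sketch, not part of the original test suite: how the
# metaclass() lookups exercised above behave for a Python 3 ``metaclass``
# keyword, using only the public astroid API. The helper name
# _demo_metaclass_lookup is hypothetical.
def _demo_metaclass_lookup():
    import astroid
    module = astroid.parse(
        "from abc import ABCMeta\n"
        "class Test(metaclass=ABCMeta):\n"
        "    pass\n")
    klass = module.body[1]
    # metaclass() resolves the keyword to the ClassDef for abc.ABCMeta; an
    # unresolvable metaclass (the ``typ`` and ``ab`` cases in these tests)
    # is reported as a false value instead.
    return klass.metaclass().name      # 'ABCMeta'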
@require_version('3.4') def test_metaclass_yes_leak(self): astroid = self.builder.string_build(dedent(""" # notice `ab` instead of `abc` from ab import ABCMeta class Meta(metaclass=ABCMeta): pass """)) klass = astroid['Meta'] self.assertIsNone(klass.metaclass()) @require_version('3.4') def test_parent_metaclass(self): astroid = self.builder.string_build(dedent(""" from abc import ABCMeta class Test(metaclass=ABCMeta): pass class SubTest(Test): pass """)) klass = astroid['SubTest'] self.assertTrue(klass.newstyle) metaclass = klass.metaclass() self.assertIsInstance(metaclass, ClassDef) self.assertEqual(metaclass.name, 'ABCMeta') @require_version('3.4') def test_metaclass_ancestors(self): astroid = self.builder.string_build(dedent(""" from abc import ABCMeta class FirstMeta(metaclass=ABCMeta): pass class SecondMeta(metaclass=type): pass class Simple: pass class FirstImpl(FirstMeta): pass class SecondImpl(FirstImpl): pass class ThirdImpl(Simple, SecondMeta): pass """)) classes = { 'ABCMeta': ('FirstImpl', 'SecondImpl'), 'type': ('ThirdImpl', ) } for metaclass, names in classes.items(): for name in names: impl = astroid[name] meta = impl.metaclass() self.assertIsInstance(meta, ClassDef) self.assertEqual(meta.name, metaclass) @require_version('3.4') def test_annotation_support(self): astroid = self.builder.string_build(dedent(""" def test(a: int, b: str, c: None, d, e, *args: float, **kwargs: int)->int: pass """)) func = astroid['test'] self.assertIsInstance(func.args.varargannotation, Name) self.assertEqual(func.args.varargannotation.name, 'float') self.assertIsInstance(func.args.kwargannotation, Name) self.assertEqual(func.args.kwargannotation.name, 'int') self.assertIsInstance(func.returns, Name) self.assertEqual(func.returns.name, 'int') arguments = func.args self.assertIsInstance(arguments.annotations[0], Name) self.assertEqual(arguments.annotations[0].name, 'int') self.assertIsInstance(arguments.annotations[1], Name) self.assertEqual(arguments.annotations[1].name, 'str') self.assertIsInstance(arguments.annotations[2], Const) self.assertIsNone(arguments.annotations[2].value) self.assertIsNone(arguments.annotations[3]) self.assertIsNone(arguments.annotations[4]) astroid = self.builder.string_build(dedent(""" def test(a: int=1, b: str=2): pass """)) func = astroid['test'] self.assertIsInstance(func.args.annotations[0], Name) self.assertEqual(func.args.annotations[0].name, 'int') self.assertIsInstance(func.args.annotations[1], Name) self.assertEqual(func.args.annotations[1].name, 'str') self.assertIsNone(func.returns) @require_version('3.4') def test_kwonlyargs_annotations_supper(self): node = self.builder.string_build(dedent(""" def test(*, a: int, b: str, c: None, d, e): pass """)) func = node['test'] arguments = func.args self.assertIsInstance(arguments.kwonlyargs_annotations[0], Name) self.assertEqual(arguments.kwonlyargs_annotations[0].name, 'int') self.assertIsInstance(arguments.kwonlyargs_annotations[1], Name) self.assertEqual(arguments.kwonlyargs_annotations[1].name, 'str') self.assertIsInstance(arguments.kwonlyargs_annotations[2], Const) self.assertIsNone(arguments.kwonlyargs_annotations[2].value) self.assertIsNone(arguments.kwonlyargs_annotations[3]) self.assertIsNone(arguments.kwonlyargs_annotations[4]) @require_version('3.4') def test_annotation_as_string(self): code1 = dedent(''' def test(a, b:int=4, c=2, f:'lala'=4)->2: pass''') code2 = dedent(''' def test(a:typing.Generic[T], c:typing.Any=24)->typing.Iterable: pass''') for code in (code1, code2): func = extract_node(code) 
self.assertEqual(func.as_string(), code) @require_version('3.5') def test_unpacking_in_dicts(self): code = "{'x': 1, **{'y': 2}}" node = extract_node(code) self.assertEqual(node.as_string(), code) keys = [key for (key, _) in node.items] self.assertIsInstance(keys[0], nodes.Const) self.assertIsInstance(keys[1], nodes.DictUnpack) @require_version('3.5') def test_nested_unpacking_in_dicts(self): code = "{'x': 1, **{'y': 2, **{'z': 3}}}" node = extract_node(code) self.assertEqual(node.as_string(), code) @require_version('3.5') def test_unpacking_in_dict_getitem(self): node = extract_node('{1:2, **{2:3, 3:4}, **{5: 6}}') for key, expected in ((1, 2), (2, 3), (3, 4), (5, 6)): value = node.getitem(nodes.Const(key)) self.assertIsInstance(value, nodes.Const) self.assertEqual(value.value, expected) @require_version('3.6') def test_format_string(self): code = "f'{greetings} {person}'" node = extract_node(code) self.assertEqual(node.as_string(), code) @require_version('3.6') def test_underscores_in_numeral_literal(self): pairs = [ ('10_1000', 101000), ('10_000_000', 10000000), ('0x_FF_FF', 65535), ] for value, expected in pairs: node = extract_node(value) inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, expected) @require_version('3.6') def test_async_comprehensions(self): async_comprehensions = [ extract_node("async def f(): return __([i async for i in aiter() if i % 2])"), extract_node("async def f(): return __({i async for i in aiter() if i % 2})"), extract_node("async def f(): return __((i async for i in aiter() if i % 2))"), extract_node("async def f(): return __({i: i async for i in aiter() if i % 2})") ] non_async_comprehensions = [ extract_node("async def f(): return __({i: i for i in iter() if i % 2})") ] for comp in async_comprehensions: self.assertTrue(comp.generators[0].is_async) for comp in non_async_comprehensions: self.assertFalse(comp.generators[0].is_async) @require_version('3.7') def test_async_comprehensions_outside_coroutine(self): # When async and await will become keywords, async comprehensions # will be allowed outside of coroutines body comprehensions = [ "[i async for i in aiter() if condition(i)]", "[await fun() async for fun in funcs]", "{await fun() async for fun in funcs}", "{fun: await fun() async for fun in funcs}", "[await fun() async for fun in funcs if await smth]", "{await fun() async for fun in funcs if await smth}", "{fun: await fun() async for fun in funcs if await smth}", "[await fun() async for fun in funcs]", "{await fun() async for fun in funcs}", "{fun: await fun() async for fun in funcs}", "[await fun() async for fun in funcs if await smth]", "{await fun() async for fun in funcs if await smth}", "{fun: await fun() async for fun in funcs if await smth}", ] for comp in comprehensions: node = extract_node(comp) self.assertTrue(node.generators[0].is_async) @require_version('3.6') def test_async_comprehensions_as_string(self): func_bodies = [ "return [i async for i in aiter() if condition(i)]", "return [await fun() for fun in funcs]", "return {await fun() for fun in funcs}", "return {fun: await fun() for fun in funcs}", "return [await fun() for fun in funcs if await smth]", "return {await fun() for fun in funcs if await smth}", "return {fun: await fun() for fun in funcs if await smth}", "return [await fun() async for fun in funcs]", "return {await fun() async for fun in funcs}", "return {fun: await fun() async for fun in funcs}", "return [await fun() async for fun in funcs if await smth]", "return {await 
fun() async for fun in funcs if await smth}", "return {fun: await fun() async for fun in funcs if await smth}", ] for func_body in func_bodies: code = dedent(''' async def f(): {}'''.format(func_body)) func = extract_node(code) self.assertEqual(func.as_string().strip(), code.strip()) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_raw_building.py0000644000076500000240000000565613324063433023533 0ustar claudiustaff00000000000000# Copyright (c) 2013 AndroWiiid # Copyright (c) 2014-2016, 2018 Claudiu Popa # Copyright (c) 2014 Google, Inc. # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2018 Anthony Sottile # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import platform import unittest from astroid.builder import AstroidBuilder from astroid.raw_building import ( attach_dummy_node, build_module, build_class, build_function, build_from_import ) from astroid import test_utils class RawBuildingTC(unittest.TestCase): def test_attach_dummy_node(self): node = build_module('MyModule') attach_dummy_node(node, 'DummyNode') self.assertEqual(1, len(list(node.get_children()))) def test_build_module(self): node = build_module('MyModule') self.assertEqual(node.name, 'MyModule') self.assertEqual(node.pure_python, False) self.assertEqual(node.package, False) self.assertEqual(node.parent, None) def test_build_class(self): node = build_class('MyClass') self.assertEqual(node.name, 'MyClass') self.assertEqual(node.doc, None) def test_build_function(self): node = build_function('MyFunction') self.assertEqual(node.name, 'MyFunction') self.assertEqual(node.doc, None) def test_build_function_args(self): args = ['myArgs1', 'myArgs2'] node = build_function('MyFunction', args) self.assertEqual('myArgs1', node.args.args[0].name) self.assertEqual('myArgs2', node.args.args[1].name) self.assertEqual(2, len(node.args.args)) def test_build_function_defaults(self): defaults = ['defaults1', 'defaults2'] node = build_function('MyFunction', None, defaults) self.assertEqual(2, len(node.args.defaults)) def test_build_from_import(self): names = ['exceptions, inference, inspector'] node = build_from_import('astroid', names) self.assertEqual(len(names), len(node.names)) @unittest.skipIf(platform.python_implementation() == 'PyPy', 'Only affects CPython') @test_utils.require_version(minver='3.0') def test_io_is__io(self): # _io module calls itself io. This leads # to cyclic dependencies when astroid tries to resolve # what io.BufferedReader is. The code that handles this # is in astroid.raw_building.imported_member, which verifies # the true name of the module. import _io builder = AstroidBuilder() module = builder.inspect_build(_io) buffered_reader = module.getattr('BufferedReader')[0] self.assertEqual(buffered_reader.root().name, 'io') if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_regrtest.py0000644000076500000240000002517113324063433022716 0ustar claudiustaff00000000000000# Copyright (c) 2006-2008, 2010-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2007 Marien Zwart # Copyright (c) 2013-2014 Google, Inc. 
# Copyright (c) 2014-2016, 2018 Claudiu Popa # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2018 Nick Drozd # Copyright (c) 2018 Anthony Sottile # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import sys import unittest import textwrap from astroid import MANAGER, Instance, nodes from astroid.bases import BUILTINS from astroid.builder import AstroidBuilder, extract_node from astroid import exceptions from astroid.raw_building import build_module from astroid.manager import AstroidManager from astroid.test_utils import require_version from astroid.tests import resources from astroid import transforms class NonRegressionTests(resources.AstroidCacheSetupMixin, unittest.TestCase): def setUp(self): sys.path.insert(0, resources.find('data')) MANAGER.always_load_extensions = True MANAGER.astroid_cache[BUILTINS] = self._builtins def tearDown(self): # Since we may have created a brainless manager, leading # to a new cache builtin module and proxy classes in the constants, # clear out the global manager cache. MANAGER.clear_cache(self._builtins) MANAGER.always_load_extensions = False sys.path.pop(0) sys.path_importer_cache.pop(resources.find('data'), None) def brainless_manager(self): manager = AstroidManager() # avoid caching into the AstroidManager borg since we get problems # with other tests : manager.__dict__ = {} manager._failed_import_hooks = [] manager.astroid_cache = {} manager._mod_file_cache = {} manager._transform = transforms.TransformVisitor() manager.clear_cache() # trigger proper bootstraping return manager def test_module_path(self): man = self.brainless_manager() mod = man.ast_from_module_name('package.import_package_subpackage_module') package = next(mod.igetattr('package')) self.assertEqual(package.name, 'package') subpackage = next(package.igetattr('subpackage')) self.assertIsInstance(subpackage, nodes.Module) self.assertTrue(subpackage.package) self.assertEqual(subpackage.name, 'package.subpackage') module = next(subpackage.igetattr('module')) self.assertEqual(module.name, 'package.subpackage.module') def test_package_sidepackage(self): manager = self.brainless_manager() assert 'package.sidepackage' not in MANAGER.astroid_cache package = manager.ast_from_module_name('absimp') self.assertIsInstance(package, nodes.Module) self.assertTrue(package.package) subpackage = next(package.getattr('sidepackage')[0].infer()) self.assertIsInstance(subpackage, nodes.Module) self.assertTrue(subpackage.package) self.assertEqual(subpackage.name, 'absimp.sidepackage') def test_living_property(self): builder = AstroidBuilder() builder._done = {} builder._module = sys.modules[__name__] builder.object_build(build_module('module_name', ''), Whatever) def test_new_style_class_detection(self): try: import pygtk # pylint: disable=unused-variable except ImportError: self.skipTest('test skipped: pygtk is not available') # XXX may fail on some pygtk version, because objects in # gobject._gobject have __module__ set to gobject :( builder = AstroidBuilder() data = """ import pygtk pygtk.require("2.6") import gobject class A(gobject.GObject): pass """ astroid = builder.string_build(data, __name__, __file__) a = astroid['A'] self.assertTrue(a.newstyle) def test_numpy_crash(self): """test don't crash on numpy""" #a crash occurred somewhere in the past, and an # InferenceError instead of a crash was better, but now we even 
infer! try: import numpy # pylint: disable=unused-variable except ImportError: self.skipTest('test skipped: numpy is not available') builder = AstroidBuilder() data = """ from numpy import multiply multiply(1, 2, 3) """ astroid = builder.string_build(data, __name__, __file__) callfunc = astroid.body[1].value.func inferred = callfunc.inferred() self.assertEqual(len(inferred), 1) @require_version('3.0') def test_nameconstant(self): # used to fail for Python 3.4 builder = AstroidBuilder() astroid = builder.string_build("def test(x=True): pass") default = astroid.body[0].args.args[0] self.assertEqual(default.name, 'x') self.assertEqual(next(default.infer()).value, True) def test_recursion_regression_issue25(self): builder = AstroidBuilder() data = """ import recursion as base _real_Base = base.Base class Derived(_real_Base): pass def run(): base.Base = Derived """ astroid = builder.string_build(data, __name__, __file__) # Used to crash in _is_metaclass, due to wrong # ancestors chain classes = astroid.nodes_of_class(nodes.ClassDef) for klass in classes: # triggers the _is_metaclass call klass.type # pylint: disable=pointless-statement def test_decorator_callchain_issue42(self): builder = AstroidBuilder() data = """ def test(): def factory(func): def newfunc(): func() return newfunc return factory @test() def crash(): pass """ astroid = builder.string_build(data, __name__, __file__) self.assertEqual(astroid['crash'].type, 'function') def test_filter_stmts_scoping(self): builder = AstroidBuilder() data = """ def test(): compiler = int() class B(compiler.__class__): pass compiler = B() return compiler """ astroid = builder.string_build(data, __name__, __file__) test = astroid['test'] result = next(test.infer_call_result(astroid)) self.assertIsInstance(result, Instance) base = next(result._proxied.bases[0].infer()) self.assertEqual(base.name, 'int') def test_ancestors_patching_class_recursion(self): node = AstroidBuilder().string_build(textwrap.dedent(""" import string Template = string.Template class A(Template): pass class B(A): pass def test(x=False): if x: string.Template = A else: string.Template = B """)) klass = node['A'] ancestors = list(klass.ancestors()) self.assertEqual(ancestors[0].qname(), 'string.Template') def test_ancestors_yes_in_bases(self): # Test for issue https://bitbucket.org/logilab/astroid/issue/84 # This used to crash astroid with a TypeError, because an Uninferable # node was present in the bases node = extract_node(""" def with_metaclass(meta, *bases): class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, 'temporary_class', (), {}) import lala class A(with_metaclass(object, lala.lala)): #@ pass """) ancestors = list(node.ancestors()) self.assertEqual(len(ancestors), 1) self.assertEqual(ancestors[0].qname(), "{}.object".format(BUILTINS)) def test_ancestors_missing_from_function(self): # Test for https://www.logilab.org/ticket/122793 node = extract_node(''' def gen(): yield GEN = gen() next(GEN) ''') self.assertRaises(exceptions.InferenceError, next, node.infer()) def test_unicode_in_docstring(self): # Crashed for astroid==1.4.1 # Test for https://bitbucket.org/logilab/astroid/issues/273/ # In a regular file, "coding: utf-8" would have been used. 
node = extract_node(''' from __future__ import unicode_literals class MyClass(object): def method(self): "With unicode : %s " instance = MyClass() ''' % "\u2019") next(node.value.infer()).as_string() def test_binop_generates_nodes_with_parents(self): node = extract_node(''' def no_op(*args): pass def foo(*args): def inner(*more_args): args + more_args #@ return inner ''') inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Tuple) self.assertIsNotNone(inferred.parent) self.assertIsInstance(inferred.parent, nodes.BinOp) def test_decorator_names_inference_error_leaking(self): node = extract_node(''' class Parent(object): @property def foo(self): pass class Child(Parent): @Parent.foo.getter def foo(self): #@ return super(Child, self).foo + ['oink'] ''') inferred = next(node.infer()) self.assertEqual(inferred.decoratornames(), set()) def test_ssl_protocol(self): node = extract_node(''' import ssl ssl.PROTOCOL_TLSv1 ''') inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) def test_recursive_property_method(self): node = extract_node(''' class APropert(): @property def property(self): return self APropert().property ''') next(node.infer()) def test_uninferable_string_argument_of_namedtuple(self): node = extract_node(''' import collections collections.namedtuple('{}'.format("a"), '')() ''') next(node.infer()) @require_version(maxver='3.0') def test_reassignment_in_except_handler(self): node = extract_node(''' import exceptions try: {}["a"] except KeyError, exceptions.IndexError: pass IndexError #@ ''') self.assertEqual(len(node.inferred()), 1) class Whatever: a = property(lambda x: x, lambda x: x) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_scoped_nodes.py0000644000076500000240000020144413324063433023523 0ustar claudiustaff00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2011, 2013-2015 Google, Inc. # Copyright (c) 2013-2018 Claudiu Popa # Copyright (c) 2013 Phil Schaf # Copyright (c) 2014 Eevee (Alex Munroe) # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2015 Rene Zhang # Copyright (c) 2015 Florian Bruhin # Copyright (c) 2015 Philip Lorenz # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2017-2018 Bryce Guinta # Copyright (c) 2017 Łukasz Rogalski # Copyright (c) 2017 Derek Gustafson # Copyright (c) 2018 brendanator # Copyright (c) 2018 Anthony Sottile # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """tests for specific behaviour of astroid scoped nodes (i.e. 
module, class and function) """ import os import sys from functools import partial import unittest from astroid import builder from astroid import nodes from astroid import scoped_nodes from astroid import util from astroid.exceptions import ( InferenceError, AttributeInferenceError, NoDefault, ResolveError, MroError, InconsistentMroError, DuplicateBasesError, TooManyLevelsError, NameInferenceError ) from astroid.bases import ( BUILTINS, Instance, BoundMethod, UnboundMethod, Generator ) from astroid import test_utils from astroid.tests import resources def _test_dict_interface(self, node, test_attr): self.assertIs(node[test_attr], node[test_attr]) self.assertIn(test_attr, node) node.keys() node.values() node.items() iter(node) class ModuleLoader(resources.SysPathSetup): def setUp(self): super(ModuleLoader, self).setUp() self.module = resources.build_file('data/module.py', 'data.module') self.module2 = resources.build_file('data/module2.py', 'data.module2') self.nonregr = resources.build_file('data/nonregr.py', 'data.nonregr') self.pack = resources.build_file('data/__init__.py', 'data') class ModuleNodeTest(ModuleLoader, unittest.TestCase): def test_special_attributes(self): self.assertEqual(len(self.module.getattr('__name__')), 1) self.assertIsInstance(self.module.getattr('__name__')[0], nodes.Const) self.assertEqual(self.module.getattr('__name__')[0].value, 'data.module') self.assertEqual(len(self.module.getattr('__doc__')), 1) self.assertIsInstance(self.module.getattr('__doc__')[0], nodes.Const) self.assertEqual(self.module.getattr('__doc__')[0].value, 'test module for astroid\n') self.assertEqual(len(self.module.getattr('__file__')), 1) self.assertIsInstance(self.module.getattr('__file__')[0], nodes.Const) self.assertEqual(self.module.getattr('__file__')[0].value, os.path.abspath(resources.find('data/module.py'))) self.assertEqual(len(self.module.getattr('__dict__')), 1) self.assertIsInstance(self.module.getattr('__dict__')[0], nodes.Dict) self.assertRaises(AttributeInferenceError, self.module.getattr, '__path__') self.assertEqual(len(self.pack.getattr('__path__')), 1) self.assertIsInstance(self.pack.getattr('__path__')[0], nodes.List) def test_dict_interface(self): _test_dict_interface(self, self.module, 'YO') def test_getattr(self): yo = self.module.getattr('YO')[0] self.assertIsInstance(yo, nodes.ClassDef) self.assertEqual(yo.name, 'YO') red = next(self.module.igetattr('redirect')) self.assertIsInstance(red, nodes.FunctionDef) self.assertEqual(red.name, 'four_args') namenode = next(self.module.igetattr('NameNode')) self.assertIsInstance(namenode, nodes.ClassDef) self.assertEqual(namenode.name, 'Name') # resolve packageredirection mod = resources.build_file('data/appl/myConnection.py', 'data.appl.myConnection') ssl = next(mod.igetattr('SSL1')) cnx = next(ssl.igetattr('Connection')) self.assertEqual(cnx.__class__, nodes.ClassDef) self.assertEqual(cnx.name, 'Connection') self.assertEqual(cnx.root().name, 'data.SSL1.Connection1') self.assertEqual(len(self.nonregr.getattr('enumerate')), 2) self.assertRaises(InferenceError, self.nonregr.igetattr, 'YOAA') def test_wildcard_import_names(self): m = resources.build_file('data/all.py', 'all') self.assertEqual(m.wildcard_import_names(), ['Aaa', '_bla', 'name']) m = resources.build_file('data/notall.py', 'notall') res = sorted(m.wildcard_import_names()) self.assertEqual(res, ['Aaa', 'func', 'name', 'other']) def test_public_names(self): m = builder.parse(''' name = 'a' _bla = 2 other = 'o' class Aaa: pass def func(): print('yo') __all__ = 'Aaa', 
'_bla', 'name' ''') values = sorted(['Aaa', 'name', 'other', 'func']) self.assertEqual(sorted(m.public_names()), values) m = builder.parse(''' name = 'a' _bla = 2 other = 'o' class Aaa: pass def func(): return 'yo' ''') res = sorted(m.public_names()) self.assertEqual(res, values) m = builder.parse(''' from missing import tzop trop = "test" __all__ = (trop, "test1", tzop, 42) ''') res = sorted(m.public_names()) self.assertEqual(res, ["trop", "tzop"]) m = builder.parse(''' test = tzop = 42 __all__ = ('test', ) + ('tzop', ) ''') res = sorted(m.public_names()) self.assertEqual(res, ['test', 'tzop']) def test_module_getattr(self): data = ''' appli = application appli += 2 del appli ''' astroid = builder.parse(data, __name__) # test del statement not returned by getattr self.assertEqual(len(astroid.getattr('appli')), 2, astroid.getattr('appli')) def test_relative_to_absolute_name(self): # package mod = nodes.Module('very.multi.package', 'doc') mod.package = True modname = mod.relative_to_absolute_name('utils', 1) self.assertEqual(modname, 'very.multi.package.utils') modname = mod.relative_to_absolute_name('utils', 2) self.assertEqual(modname, 'very.multi.utils') modname = mod.relative_to_absolute_name('utils', 0) self.assertEqual(modname, 'very.multi.package.utils') modname = mod.relative_to_absolute_name('', 1) self.assertEqual(modname, 'very.multi.package') # non package mod = nodes.Module('very.multi.module', 'doc') mod.package = False modname = mod.relative_to_absolute_name('utils', 0) self.assertEqual(modname, 'very.multi.utils') modname = mod.relative_to_absolute_name('utils', 1) self.assertEqual(modname, 'very.multi.utils') modname = mod.relative_to_absolute_name('utils', 2) self.assertEqual(modname, 'very.utils') modname = mod.relative_to_absolute_name('', 1) self.assertEqual(modname, 'very.multi') def test_relative_to_absolute_name_beyond_top_level(self): mod = nodes.Module('a.b.c', '') mod.package = True for level in (5, 4): with self.assertRaises(TooManyLevelsError) as cm: mod.relative_to_absolute_name('test', level) expected = ("Relative import with too many levels " "({level}) for module {name!r}".format( level=level - 1, name=mod.name)) self.assertEqual(expected, str(cm.exception)) def test_import_1(self): data = '''from . import subpackage''' sys.path.insert(0, resources.find('data')) astroid = builder.parse(data, 'package', 'data/package/__init__.py') try: m = astroid.import_module('', level=1) self.assertEqual(m.name, 'package') inferred = list(astroid.igetattr('subpackage')) self.assertEqual(len(inferred), 1) self.assertEqual(inferred[0].name, 'package.subpackage') finally: del sys.path[0] def test_import_2(self): data = '''from . 
import subpackage as pouet''' astroid = builder.parse(data, 'package', 'data/package/__init__.py') sys.path.insert(0, resources.find('data')) try: m = astroid.import_module('', level=1) self.assertEqual(m.name, 'package') inferred = list(astroid.igetattr('pouet')) self.assertEqual(len(inferred), 1) self.assertEqual(inferred[0].name, 'package.subpackage') finally: del sys.path[0] def test_file_stream_in_memory(self): data = '''irrelevant_variable is irrelevant''' astroid = builder.parse(data, 'in_memory') with astroid.stream() as stream: self.assertEqual(stream.read().decode(), data) def test_file_stream_physical(self): path = resources.find('data/all.py') astroid = builder.AstroidBuilder().file_build(path, 'all') with open(path, 'rb') as file_io: with astroid.stream() as stream: self.assertEqual(stream.read(), file_io.read()) def test_file_stream_api(self): path = resources.find('data/all.py') astroid = builder.AstroidBuilder().file_build(path, 'all') with self.assertRaises(AttributeError): # pylint: disable=pointless-statement,no-member astroid.file_stream def test_stream_api(self): path = resources.find('data/all.py') astroid = builder.AstroidBuilder().file_build(path, 'all') stream = astroid.stream() self.assertTrue(hasattr(stream, 'close')) with stream: with open(path, 'rb') as file_io: self.assertEqual(stream.read(), file_io.read()) class FunctionNodeTest(ModuleLoader, unittest.TestCase): def test_special_attributes(self): func = self.module2['make_class'] self.assertEqual(len(func.getattr('__name__')), 1) self.assertIsInstance(func.getattr('__name__')[0], nodes.Const) self.assertEqual(func.getattr('__name__')[0].value, 'make_class') self.assertEqual(len(func.getattr('__doc__')), 1) self.assertIsInstance(func.getattr('__doc__')[0], nodes.Const) self.assertEqual(func.getattr('__doc__')[0].value, 'check base is correctly resolved to Concrete0') self.assertEqual(len(self.module.getattr('__dict__')), 1) self.assertIsInstance(self.module.getattr('__dict__')[0], nodes.Dict) def test_dict_interface(self): _test_dict_interface(self, self.module['global_access'], 'local') def test_default_value(self): func = self.module2['make_class'] self.assertIsInstance(func.args.default_value('base'), nodes.Attribute) self.assertRaises(NoDefault, func.args.default_value, 'args') self.assertRaises(NoDefault, func.args.default_value, 'kwargs') self.assertRaises(NoDefault, func.args.default_value, 'any') #self.assertIsInstance(func.mularg_class('args'), nodes.Tuple) #self.assertIsInstance(func.mularg_class('kwargs'), nodes.Dict) #self.assertIsNone(func.mularg_class('base')) def test_navigation(self): function = self.module['global_access'] self.assertEqual(function.statement(), function) l_sibling = function.previous_sibling() # check taking parent if child is not a stmt self.assertIsInstance(l_sibling, nodes.Assign) child = function.args.args[0] self.assertIs(l_sibling, child.previous_sibling()) r_sibling = function.next_sibling() self.assertIsInstance(r_sibling, nodes.ClassDef) self.assertEqual(r_sibling.name, 'YO') self.assertIs(r_sibling, child.next_sibling()) last = r_sibling.next_sibling().next_sibling().next_sibling() self.assertIsInstance(last, nodes.Assign) self.assertIsNone(last.next_sibling()) first = l_sibling.root().body[0] self.assertIsNone(first.previous_sibling()) def test_nested_args(self): if sys.version_info >= (3, 0): self.skipTest("nested args has been removed in py3.x") code = ''' def nested_args(a, (b, c, d)): "nested arguments test" ''' tree = builder.parse(code) func = 
tree['nested_args'] self.assertEqual(sorted(func.locals), ['a', 'b', 'c', 'd']) self.assertEqual(func.args.format_args(), 'a, (b, c, d)') def test_four_args(self): func = self.module['four_args'] local = sorted(func.keys()) self.assertEqual(local, ['a', 'b', 'c', 'd']) self.assertEqual(func.type, 'function') def test_format_args(self): func = self.module2['make_class'] self.assertEqual(func.args.format_args(), 'any, base=data.module.YO, *args, **kwargs') func = self.module['four_args'] self.assertEqual(func.args.format_args(), 'a, b, c, d') @test_utils.require_version('3.0') def test_format_args_keyword_only_args(self): node = builder.parse(''' def test(a: int, *, b: dict): pass ''').body[-1].args formatted = node.format_args() self.assertEqual(formatted, 'a:int, *, b:dict') def test_is_generator(self): self.assertTrue(self.module2['generator'].is_generator()) self.assertFalse(self.module2['not_a_generator'].is_generator()) self.assertFalse(self.module2['make_class'].is_generator()) def test_is_abstract(self): method = self.module2['AbstractClass']['to_override'] self.assertTrue(method.is_abstract(pass_is_abstract=False)) self.assertEqual(method.qname(), 'data.module2.AbstractClass.to_override') self.assertEqual(method.pytype(), '%s.instancemethod' % BUILTINS) method = self.module2['AbstractClass']['return_something'] self.assertFalse(method.is_abstract(pass_is_abstract=False)) # non regression : test raise "string" doesn't cause an exception in is_abstract func = self.module2['raise_string'] self.assertFalse(func.is_abstract(pass_is_abstract=False)) def test_is_abstract_decorated(self): methods = builder.extract_node(""" import abc class Klass(object): @abc.abstractproperty def prop(self): #@ pass @abc.abstractmethod def method1(self): #@ pass some_other_decorator = lambda x: x @some_other_decorator def method2(self): #@ pass """) self.assertTrue(methods[0].is_abstract(pass_is_abstract=False)) self.assertTrue(methods[1].is_abstract(pass_is_abstract=False)) self.assertFalse(methods[2].is_abstract(pass_is_abstract=False)) ## def test_raises(self): ## method = self.module2['AbstractClass']['to_override'] ## self.assertEqual([str(term) for term in method.raises()], ## ["Call(Name('NotImplementedError'), [], None, None)"] ) ## def test_returns(self): ## method = self.module2['AbstractClass']['return_something'] ## # use string comp since Node doesn't handle __cmp__ ## self.assertEqual([str(term) for term in method.returns()], ## ["Const('toto')", "Const(None)"]) def test_lambda_pytype(self): data = ''' def f(): g = lambda: None ''' astroid = builder.parse(data) g = list(astroid['f'].ilookup('g'))[0] self.assertEqual(g.pytype(), '%s.function' % BUILTINS) def test_lambda_qname(self): astroid = builder.parse('lmbd = lambda: None', __name__) self.assertEqual('%s.' 
% __name__, astroid['lmbd'].parent.value.qname()) def test_is_method(self): data = ''' class A: def meth1(self): return 1 @classmethod def meth2(cls): return 2 @staticmethod def meth3(): return 3 def function(): return 0 @staticmethod def sfunction(): return -1 ''' astroid = builder.parse(data) self.assertTrue(astroid['A']['meth1'].is_method()) self.assertTrue(astroid['A']['meth2'].is_method()) self.assertTrue(astroid['A']['meth3'].is_method()) self.assertFalse(astroid['function'].is_method()) self.assertFalse(astroid['sfunction'].is_method()) def test_argnames(self): if sys.version_info < (3, 0): code = 'def f(a, (b, c), *args, **kwargs): pass' else: code = 'def f(a, b, c, *args, **kwargs): pass' astroid = builder.parse(code, __name__) self.assertEqual(astroid['f'].argnames(), ['a', 'b', 'c', 'args', 'kwargs']) def test_return_nothing(self): """test inferred value on a function with empty return""" data = ''' def func(): return a = func() ''' astroid = builder.parse(data) call = astroid.body[1].value func_vals = call.inferred() self.assertEqual(len(func_vals), 1) self.assertIsInstance(func_vals[0], nodes.Const) self.assertIsNone(func_vals[0].value) def test_func_instance_attr(self): """test instance attributes for functions""" data = """ def test(): print(test.bar) test.bar = 1 test() """ astroid = builder.parse(data, 'mod') func = astroid.body[2].value.func.inferred()[0] self.assertIsInstance(func, nodes.FunctionDef) self.assertEqual(func.name, 'test') one = func.getattr('bar')[0].inferred()[0] self.assertIsInstance(one, nodes.Const) self.assertEqual(one.value, 1) def test_type_builtin_descriptor_subclasses(self): astroid = builder.parse(""" class classonlymethod(classmethod): pass class staticonlymethod(staticmethod): pass class Node: @classonlymethod def clsmethod_subclass(cls): pass @classmethod def clsmethod(cls): pass @staticonlymethod def staticmethod_subclass(cls): pass @staticmethod def stcmethod(cls): pass """) node = astroid.locals['Node'][0] self.assertEqual(node.locals['clsmethod_subclass'][0].type, 'classmethod') self.assertEqual(node.locals['clsmethod'][0].type, 'classmethod') self.assertEqual(node.locals['staticmethod_subclass'][0].type, 'staticmethod') self.assertEqual(node.locals['stcmethod'][0].type, 'staticmethod') def test_decorator_builtin_descriptors(self): astroid = builder.parse(""" def static_decorator(platform=None, order=50): def wrapper(f): f.cgm_module = True f.cgm_module_order = order f.cgm_module_platform = platform return staticmethod(f) return wrapper def long_classmethod_decorator(platform=None, order=50): def wrapper(f): def wrapper2(f): def wrapper3(f): f.cgm_module = True f.cgm_module_order = order f.cgm_module_platform = platform return classmethod(f) return wrapper3(f) return wrapper2(f) return wrapper def classmethod_decorator(platform=None): def wrapper(f): f.platform = platform return classmethod(f) return wrapper def classmethod_wrapper(fn): def wrapper(cls, *args, **kwargs): result = fn(cls, *args, **kwargs) return result return classmethod(wrapper) def staticmethod_wrapper(fn): def wrapper(*args, **kwargs): return fn(*args, **kwargs) return staticmethod(wrapper) class SomeClass(object): @static_decorator() def static(node, cfg): pass @classmethod_decorator() def classmethod(cls): pass @static_decorator def not_so_static(node): pass @classmethod_decorator def not_so_classmethod(node): pass @classmethod_wrapper def classmethod_wrapped(cls): pass @staticmethod_wrapper def staticmethod_wrapped(): pass @long_classmethod_decorator() def 
long_classmethod(cls): pass """) node = astroid.locals['SomeClass'][0] self.assertEqual(node.locals['static'][0].type, 'staticmethod') self.assertEqual(node.locals['classmethod'][0].type, 'classmethod') self.assertEqual(node.locals['not_so_static'][0].type, 'method') self.assertEqual(node.locals['not_so_classmethod'][0].type, 'method') self.assertEqual(node.locals['classmethod_wrapped'][0].type, 'classmethod') self.assertEqual(node.locals['staticmethod_wrapped'][0].type, 'staticmethod') self.assertEqual(node.locals['long_classmethod'][0].type, 'classmethod') def test_igetattr(self): func = builder.extract_node(''' def test(): pass ''') func.instance_attrs['value'] = [nodes.Const(42)] value = func.getattr('value') self.assertEqual(len(value), 1) self.assertIsInstance(value[0], nodes.Const) self.assertEqual(value[0].value, 42) inferred = next(func.igetattr('value')) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) @test_utils.require_version(minver='3.0') def test_return_annotation_is_not_the_last(self): func = builder.extract_node(''' def test() -> bytes: pass pass return ''') last_child = func.last_child() self.assertIsInstance(last_child, nodes.Return) self.assertEqual(func.tolineno, 5) @test_utils.require_version(minver='3.6') def test_method_init_subclass(self): klass = builder.extract_node(''' class MyClass: def __init_subclass__(cls): pass ''') method = klass['__init_subclass__'] self.assertEqual([n.name for n in method.args.args], ['cls']) self.assertEqual(method.type, 'classmethod') @test_utils.require_version(minver='3.0') def test_dunder_class_local_to_method(self): node = builder.extract_node(''' class MyClass: def test(self): __class__ #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.name, 'MyClass') @test_utils.require_version(minver='3.0') def test_dunder_class_local_to_function(self): node = builder.extract_node(''' def test(self): __class__ #@ ''') with self.assertRaises(NameInferenceError): next(node.infer()) @test_utils.require_version(minver='3.0') def test_dunder_class_local_to_classmethod(self): node = builder.extract_node(''' class MyClass: @classmethod def test(cls): __class__ #@ ''') inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.name, 'MyClass') class ClassNodeTest(ModuleLoader, unittest.TestCase): def test_dict_interface(self): _test_dict_interface(self, self.module['YOUPI'], 'method') def test_cls_special_attributes_1(self): cls = self.module['YO'] self.assertEqual(len(cls.getattr('__bases__')), 1) self.assertEqual(len(cls.getattr('__name__')), 1) self.assertIsInstance(cls.getattr('__name__')[0], nodes.Const) self.assertEqual(cls.getattr('__name__')[0].value, 'YO') self.assertEqual(len(cls.getattr('__doc__')), 1) self.assertIsInstance(cls.getattr('__doc__')[0], nodes.Const) self.assertEqual(cls.getattr('__doc__')[0].value, 'hehe\n haha') # YO is an old styled class for Python 2.7 # May want to stop locals from referencing namespaced variables in the future module_attr_num = 4 if sys.version_info > (3, 3) else 1 self.assertEqual(len(cls.getattr('__module__')), module_attr_num) self.assertIsInstance(cls.getattr('__module__')[0], nodes.Const) self.assertEqual(cls.getattr('__module__')[0].value, 'data.module') self.assertEqual(len(cls.getattr('__dict__')), 1) if not cls.newstyle: self.assertRaises(AttributeInferenceError, cls.getattr, '__mro__') for cls in (nodes.List._proxied, nodes.Const(1)._proxied): 
self.assertEqual(len(cls.getattr('__bases__')), 1) self.assertEqual(len(cls.getattr('__name__')), 1) self.assertEqual(len(cls.getattr('__doc__')), 1, (cls, cls.getattr('__doc__'))) self.assertEqual(cls.getattr('__doc__')[0].value, cls.doc) self.assertEqual(len(cls.getattr('__module__')), 4) self.assertEqual(len(cls.getattr('__dict__')), 1) self.assertEqual(len(cls.getattr('__mro__')), 1) def test__mro__attribute(self): node = builder.extract_node(''' class A(object): pass class B(object): pass class C(A, B): pass ''') mro = node.getattr('__mro__')[0] self.assertIsInstance(mro, nodes.Tuple) self.assertEqual(mro.elts, node.mro()) def test__bases__attribute(self): node = builder.extract_node(''' class A(object): pass class B(object): pass class C(A, B): pass class D(C): pass ''') bases = node.getattr('__bases__')[0] self.assertIsInstance(bases, nodes.Tuple) self.assertEqual(len(bases.elts), 1) self.assertIsInstance(bases.elts[0], nodes.ClassDef) self.assertEqual(bases.elts[0].name, 'C') def test_cls_special_attributes_2(self): astroid = builder.parse(''' class A(object): pass class B(object): pass A.__bases__ += (B,) ''', __name__) self.assertEqual(len(astroid['A'].getattr('__bases__')), 2) self.assertIsInstance(astroid['A'].getattr('__bases__')[1], nodes.Tuple) self.assertIsInstance(astroid['A'].getattr('__bases__')[0], nodes.AssignAttr) def test_instance_special_attributes(self): for inst in (Instance(self.module['YO']), nodes.List(), nodes.Const(1)): self.assertRaises(AttributeInferenceError, inst.getattr, '__mro__') self.assertRaises(AttributeInferenceError, inst.getattr, '__bases__') self.assertRaises(AttributeInferenceError, inst.getattr, '__name__') self.assertEqual(len(inst.getattr('__dict__')), 1) self.assertEqual(len(inst.getattr('__doc__')), 1) def test_navigation(self): klass = self.module['YO'] self.assertEqual(klass.statement(), klass) l_sibling = klass.previous_sibling() self.assertTrue(isinstance(l_sibling, nodes.FunctionDef), l_sibling) self.assertEqual(l_sibling.name, 'global_access') r_sibling = klass.next_sibling() self.assertIsInstance(r_sibling, nodes.ClassDef) self.assertEqual(r_sibling.name, 'YOUPI') def test_local_attr_ancestors(self): module = builder.parse(''' class A(): def __init__(self): pass class B(A): pass class C(B): pass class D(object): pass class F(): pass class E(F, D): pass ''') # Test old-style (Python 2) / new-style (Python 3+) ancestors lookups klass2 = module['C'] it = klass2.local_attr_ancestors('__init__') anc_klass = next(it) self.assertIsInstance(anc_klass, nodes.ClassDef) self.assertEqual(anc_klass.name, 'A') if sys.version_info[0] == 2: self.assertRaises(StopIteration, partial(next, it)) else: anc_klass = next(it) self.assertIsInstance(anc_klass, nodes.ClassDef) self.assertEqual(anc_klass.name, 'object') self.assertRaises(StopIteration, partial(next, it)) it = klass2.local_attr_ancestors('method') self.assertRaises(StopIteration, partial(next, it)) # Test mixed-style ancestor lookups klass2 = module['E'] it = klass2.local_attr_ancestors('__init__') anc_klass = next(it) self.assertIsInstance(anc_klass, nodes.ClassDef) self.assertEqual(anc_klass.name, 'object') self.assertRaises(StopIteration, partial(next, it)) def test_local_attr_mro(self): module = builder.parse(''' class A(object): def __init__(self): pass class B(A): def __init__(self, arg, arg2): pass class C(A): pass class D(C, B): pass ''') dclass = module['D'] init = dclass.local_attr('__init__')[0] self.assertIsInstance(init, nodes.FunctionDef) self.assertEqual(init.parent.name, 'B') 
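# Illustrative sketch, not part of the original test suite: a compact,
# standalone restatement of the local_attr() lookup checked in this test,
# assuming only the public astroid API. _demo_local_attr_mro is a
# hypothetical helper name.
def _demo_local_attr_mro():
    import astroid
    module = astroid.parse(
        "class A(object):\n"
        "    def __init__(self): pass\n"
        "class B(A):\n"
        "    def __init__(self, arg, arg2): pass\n"
        "class C(A):\n"
        "    pass\n"
        "class D(C, B):\n"
        "    pass\n")
    # local_attr() searches the class itself and then its ancestors in MRO
    # order, so D picks up __init__ from B before reaching A.
    init = module['D'].local_attr('__init__')[0]
    return init.parent.name            # 'B'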
cclass = module['C'] init = cclass.local_attr('__init__')[0] self.assertIsInstance(init, nodes.FunctionDef) self.assertEqual(init.parent.name, 'A') ancestors = list(dclass.local_attr_ancestors('__init__')) self.assertEqual([node.name for node in ancestors], ['B', 'A', 'object']) def test_instance_attr_ancestors(self): klass2 = self.module['YOUPI'] it = klass2.instance_attr_ancestors('yo') anc_klass = next(it) self.assertIsInstance(anc_klass, nodes.ClassDef) self.assertEqual(anc_klass.name, 'YO') self.assertRaises(StopIteration, partial(next, it)) klass2 = self.module['YOUPI'] it = klass2.instance_attr_ancestors('member') self.assertRaises(StopIteration, partial(next, it)) def test_methods(self): expected_methods = {'__init__', 'class_method', 'method', 'static_method'} klass2 = self.module['YOUPI'] methods = {m.name for m in klass2.methods()} self.assertTrue( methods.issuperset(expected_methods)) methods = {m.name for m in klass2.mymethods()} self.assertSetEqual(expected_methods, methods) klass2 = self.module2['Specialization'] methods = {m.name for m in klass2.mymethods()} self.assertSetEqual(set(), methods) method_locals = klass2.local_attr('method') self.assertEqual(len(method_locals), 1) self.assertEqual(method_locals[0].name, 'method') self.assertRaises(AttributeInferenceError, klass2.local_attr, 'nonexistent') methods = {m.name for m in klass2.methods()} self.assertTrue(methods.issuperset(expected_methods)) #def test_rhs(self): # my_dict = self.module['MY_DICT'] # self.assertIsInstance(my_dict.rhs(), nodes.Dict) # a = self.module['YO']['a'] # value = a.rhs() # self.assertIsInstance(value, nodes.Const) # self.assertEqual(value.value, 1) @unittest.skipIf(sys.version_info[0] >= 3, "Python 2 class semantics required.") def test_ancestors(self): klass = self.module['YOUPI'] self.assertEqual(['YO'], [a.name for a in klass.ancestors()]) klass = self.module2['Specialization'] self.assertEqual(['YOUPI', 'YO'], [a.name for a in klass.ancestors()]) @unittest.skipIf(sys.version_info[0] < 3, "Python 3 class semantics required.") def test_ancestors_py3(self): klass = self.module['YOUPI'] self.assertEqual(['YO', 'object'], [a.name for a in klass.ancestors()]) klass = self.module2['Specialization'] self.assertEqual(['YOUPI', 'YO', 'object'], [a.name for a in klass.ancestors()]) def test_type(self): klass = self.module['YOUPI'] self.assertEqual(klass.type, 'class') klass = self.module2['Metaclass'] self.assertEqual(klass.type, 'metaclass') klass = self.module2['MyException'] self.assertEqual(klass.type, 'exception') klass = self.module2['MyError'] self.assertEqual(klass.type, 'exception') # the following class used to be detected as a metaclass # after the fix which used instance._proxied in .ancestors(), # when in fact it is a normal class klass = self.module2['NotMetaclass'] self.assertEqual(klass.type, 'class') def test_inner_classes(self): eee = self.nonregr['Ccc']['Eee'] self.assertEqual([n.name for n in eee.ancestors()], ['Ddd', 'Aaa', 'object']) def test_classmethod_attributes(self): data = ''' class WebAppObject(object): def registered(cls, application): cls.appli = application cls.schema = application.schema cls.config = application.config return cls registered = classmethod(registered) ''' astroid = builder.parse(data, __name__) cls = astroid['WebAppObject'] assert_keys = ['__module__', '__qualname__', 'appli', 'config', 'registered', 'schema'] if sys.version_info < (3, 3): assert_keys.pop(assert_keys.index('__qualname__')) self.assertEqual(sorted(cls.locals.keys()), assert_keys) def 
test_class_getattr(self): data = ''' class WebAppObject(object): appli = application appli += 2 del self.appli ''' astroid = builder.parse(data, __name__) cls = astroid['WebAppObject'] # test del statement not returned by getattr self.assertEqual(len(cls.getattr('appli')), 2) def test_instance_getattr(self): data = ''' class WebAppObject(object): def __init__(self, application): self.appli = application self.appli += 2 del self.appli ''' astroid = builder.parse(data) inst = Instance(astroid['WebAppObject']) # test del statement not returned by getattr self.assertEqual(len(inst.getattr('appli')), 2) def test_instance_getattr_with_class_attr(self): data = ''' class Parent: aa = 1 cc = 1 class Klass(Parent): aa = 0 bb = 0 def incr(self, val): self.cc = self.aa if val > self.aa: val = self.aa if val < self.bb: val = self.bb self.aa += val ''' astroid = builder.parse(data) inst = Instance(astroid['Klass']) self.assertEqual(len(inst.getattr('aa')), 3, inst.getattr('aa')) self.assertEqual(len(inst.getattr('bb')), 1, inst.getattr('bb')) self.assertEqual(len(inst.getattr('cc')), 2, inst.getattr('cc')) def test_getattr_method_transform(self): data = ''' class Clazz(object): def m1(self, value): self.value = value m2 = m1 def func(arg1, arg2): "function that will be used as a method" return arg1.value + arg2 Clazz.m3 = func inst = Clazz() inst.m4 = func ''' astroid = builder.parse(data) cls = astroid['Clazz'] # test del statement not returned by getattr for method in ('m1', 'm2', 'm3'): inferred = list(cls.igetattr(method)) self.assertEqual(len(inferred), 1) self.assertIsInstance(inferred[0], UnboundMethod) inferred = list(Instance(cls).igetattr(method)) self.assertEqual(len(inferred), 1) self.assertIsInstance(inferred[0], BoundMethod) inferred = list(Instance(cls).igetattr('m4')) self.assertEqual(len(inferred), 1) self.assertIsInstance(inferred[0], nodes.FunctionDef) def test_getattr_from_grandpa(self): data = ''' class Future: attr = 1 class Present(Future): pass class Past(Present): pass ''' astroid = builder.parse(data) past = astroid['Past'] attr = past.getattr('attr') self.assertEqual(len(attr), 1) attr1 = attr[0] self.assertIsInstance(attr1, nodes.AssignName) self.assertEqual(attr1.name, 'attr') def test_function_with_decorator_lineno(self): data = ''' @f(a=2, b=3) def g1(x): print(x) @f(a=2, b=3) def g2(): pass ''' astroid = builder.parse(data) self.assertEqual(astroid['g1'].fromlineno, 4) self.assertEqual(astroid['g1'].tolineno, 5) self.assertEqual(astroid['g2'].fromlineno, 9) self.assertEqual(astroid['g2'].tolineno, 10) @test_utils.require_version(maxver='3.0') def test_simple_metaclass(self): astroid = builder.parse(""" class Test(object): __metaclass__ = type """) klass = astroid['Test'] metaclass = klass.metaclass() self.assertIsInstance(metaclass, scoped_nodes.ClassDef) self.assertEqual(metaclass.name, 'type') def test_metaclass_error(self): astroid = builder.parse(""" class Test(object): __metaclass__ = typ """) klass = astroid['Test'] self.assertFalse(klass.metaclass()) @test_utils.require_version(maxver='3.0') def test_metaclass_imported(self): astroid = builder.parse(""" from abc import ABCMeta class Test(object): __metaclass__ = ABCMeta """) klass = astroid['Test'] metaclass = klass.metaclass() self.assertIsInstance(metaclass, scoped_nodes.ClassDef) self.assertEqual(metaclass.name, 'ABCMeta') def test_metaclass_yes_leak(self): astroid = builder.parse(""" # notice `ab` instead of `abc` from ab import ABCMeta class Meta(object): __metaclass__ = ABCMeta """) klass = astroid['Meta'] 
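# Illustrative sketch, not part of the original test suite: the class-versus-
# instance attribute access behaviour checked in test_getattr_method_transform
# above, condensed into a standalone snippet. _demo_bound_vs_unbound is a
# hypothetical helper name.
def _demo_bound_vs_unbound():
    import astroid
    from astroid.bases import BoundMethod, UnboundMethod, Instance
    module = astroid.parse(
        "class Clazz(object):\n"
        "    def m1(self, value):\n"
        "        self.value = value\n")
    cls = module['Clazz']
    # Looked up on the class, the method infers to an UnboundMethod, while
    # looked up on an instance it infers to a BoundMethod.
    assert isinstance(next(cls.igetattr('m1')), UnboundMethod)
    assert isinstance(next(Instance(cls).igetattr('m1')), BoundMethod)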
self.assertIsNone(klass.metaclass()) @test_utils.require_version(maxver='3.0') def test_newstyle_and_metaclass_good(self): astroid = builder.parse(""" from abc import ABCMeta class Test: __metaclass__ = ABCMeta """) klass = astroid['Test'] self.assertTrue(klass.newstyle) self.assertEqual(klass.metaclass().name, 'ABCMeta') astroid = builder.parse(""" from abc import ABCMeta __metaclass__ = ABCMeta class Test: pass """) klass = astroid['Test'] self.assertTrue(klass.newstyle) self.assertEqual(klass.metaclass().name, 'ABCMeta') @test_utils.require_version(maxver='3.0') def test_nested_metaclass(self): astroid = builder.parse(""" from abc import ABCMeta class A(object): __metaclass__ = ABCMeta class B: pass __metaclass__ = ABCMeta class C: __metaclass__ = type class D: pass """) a = astroid['A'] b = a.locals['B'][0] c = astroid['C'] d = c.locals['D'][0] self.assertEqual(a.metaclass().name, 'ABCMeta') self.assertFalse(b.newstyle) self.assertIsNone(b.metaclass()) self.assertEqual(c.metaclass().name, 'type') self.assertEqual(d.metaclass().name, 'ABCMeta') @test_utils.require_version(maxver='3.0') def test_parent_metaclass(self): astroid = builder.parse(""" from abc import ABCMeta class Test: __metaclass__ = ABCMeta class SubTest(Test): pass """) klass = astroid['SubTest'] self.assertTrue(klass.newstyle) metaclass = klass.metaclass() self.assertIsInstance(metaclass, scoped_nodes.ClassDef) self.assertEqual(metaclass.name, 'ABCMeta') @test_utils.require_version(maxver='3.0') def test_metaclass_ancestors(self): astroid = builder.parse(""" from abc import ABCMeta class FirstMeta(object): __metaclass__ = ABCMeta class SecondMeta(object): __metaclass__ = type class Simple(object): pass class FirstImpl(FirstMeta): pass class SecondImpl(FirstImpl): pass class ThirdImpl(Simple, SecondMeta): pass """) classes = { 'ABCMeta': ('FirstImpl', 'SecondImpl'), 'type': ('ThirdImpl', ) } for metaclass, names in classes.items(): for name in names: impl = astroid[name] meta = impl.metaclass() self.assertIsInstance(meta, nodes.ClassDef) self.assertEqual(meta.name, metaclass) def test_metaclass_type(self): klass = builder.extract_node(""" def with_metaclass(meta, base=object): return meta("NewBase", (base, ), {}) class ClassWithMeta(with_metaclass(type)): #@ pass """) self.assertEqual( ['NewBase', 'object'], [base.name for base in klass.ancestors()]) def test_no_infinite_metaclass_loop(self): klass = builder.extract_node(""" class SSS(object): class JJJ(object): pass @classmethod def Init(cls): cls.JJJ = type('JJJ', (cls.JJJ,), {}) class AAA(SSS): pass class BBB(AAA.JJJ): pass """) self.assertFalse(scoped_nodes._is_metaclass(klass)) ancestors = [base.name for base in klass.ancestors()] self.assertIn('object', ancestors) self.assertIn('JJJ', ancestors) def test_no_infinite_metaclass_loop_with_redefine(self): ast_nodes = builder.extract_node(""" import datetime class A(datetime.date): #@ @classmethod def now(cls): return cls() class B(datetime.date): #@ pass datetime.date = A datetime.date = B """) for klass in ast_nodes: self.assertEqual(None, klass.metaclass()) def test_metaclass_generator_hack(self): klass = builder.extract_node(""" import six class WithMeta(six.with_metaclass(type, object)): #@ pass """) self.assertEqual( ['object'], [base.name for base in klass.ancestors()]) self.assertEqual( 'type', klass.metaclass().name) def test_using_six_add_metaclass(self): klass = builder.extract_node(''' import six import abc @six.add_metaclass(abc.ABCMeta) class WithMeta(object): pass ''') inferred = next(klass.infer()) 
metaclass = inferred.metaclass() self.assertIsInstance(metaclass, scoped_nodes.ClassDef) self.assertIn(metaclass.qname(), ('abc.ABCMeta', '_py_abc.ABCMeta')) def test_using_invalid_six_add_metaclass_call(self): klass = builder.extract_node(''' import six @six.add_metaclass() class Invalid(object): pass ''') inferred = next(klass.infer()) self.assertIsNone(inferred.metaclass()) def test_nonregr_infer_callresult(self): astroid = builder.parse(""" class Delegate(object): def __get__(self, obj, cls): return getattr(obj._subject, self.attribute) class CompositeBuilder(object): __call__ = Delegate() builder = CompositeBuilder(result, composite) tgts = builder() """) instance = astroid['tgts'] # used to raise "'_Yes' object is not iterable", see # https://bitbucket.org/logilab/astroid/issue/17 self.assertEqual(list(instance.infer()), [util.Uninferable]) def test_slots(self): astroid = builder.parse(""" from collections import deque from textwrap import dedent class First(object): #@ __slots__ = ("a", "b", 1) class Second(object): #@ __slots__ = "a" class Third(object): #@ __slots__ = deque(["a", "b", "c"]) class Fourth(object): #@ __slots__ = {"a": "a", "b": "b"} class Fifth(object): #@ __slots__ = list class Sixth(object): #@ __slots__ = "" class Seventh(object): #@ __slots__ = dedent.__name__ class Eight(object): #@ __slots__ = ("parens") class Ninth(object): #@ pass class Ten(object): #@ __slots__ = dict({"a": "b", "c": "d"}) """) expected = [ ('First', ('a', 'b')), ('Second', ('a', )), ('Third', None), ('Fourth', ('a', 'b')), ('Fifth', None), ('Sixth', None), ('Seventh', ('dedent', )), ('Eight', ('parens', )), ('Ninth', None), ('Ten', ('a', 'c')), ] for cls, expected_value in expected: slots = astroid[cls].slots() if expected_value is None: self.assertIsNone(slots) else: self.assertEqual(list(expected_value), [node.value for node in slots]) @test_utils.require_version(maxver='3.0') def test_slots_py2(self): module = builder.parse(""" class UnicodeSlots(object): __slots__ = (u"a", u"b", "c") """) slots = module['UnicodeSlots'].slots() self.assertEqual(len(slots), 3) self.assertEqual(slots[0].value, "a") self.assertEqual(slots[1].value, "b") self.assertEqual(slots[2].value, "c") @test_utils.require_version(maxver='3.0') def test_slots_py2_not_implemented(self): module = builder.parse(""" class OldStyle: __slots__ = ("a", "b") """) msg = "The concept of slots is undefined for old-style classes." 
with self.assertRaises(NotImplementedError) as cm: module['OldStyle'].slots() self.assertEqual(str(cm.exception), msg) def test_slots_for_dict_keys(self): module = builder.parse(''' class Issue(object): SlotDefaults = {'id': 0, 'id1':1} __slots__ = SlotDefaults.keys() ''') cls = module['Issue'] slots = cls.slots() self.assertEqual(len(slots), 2) self.assertEqual(slots[0].value, 'id') self.assertEqual(slots[1].value, 'id1') def test_slots_empty_list_of_slots(self): module = builder.parse(""" class Klass(object): __slots__ = () """) cls = module['Klass'] self.assertEqual(cls.slots(), []) def test_slots_taken_from_parents(self): module = builder.parse(''' class FirstParent(object): __slots__ = ('a', 'b', 'c') class SecondParent(FirstParent): __slots__ = ('d', 'e') class Third(SecondParent): __slots__ = ('d', ) ''') cls = module['Third'] slots = cls.slots() self.assertEqual(sorted({slot.value for slot in slots}), ['a', 'b', 'c', 'd', 'e']) def test_all_ancestors_need_slots(self): module = builder.parse(''' class A(object): __slots__ = ('a', ) class B(A): pass class C(B): __slots__ = ('a', ) ''') cls = module['C'] self.assertIsNone(cls.slots()) cls = module['B'] self.assertIsNone(cls.slots()) def assertEqualMro(self, klass, expected_mro): self.assertEqual( [member.name for member in klass.mro()], expected_mro) @test_utils.require_version(maxver='3.0') def test_no_mro_for_old_style(self): node = builder.extract_node(""" class Old: pass""") with self.assertRaises(NotImplementedError) as cm: node.mro() self.assertEqual(str(cm.exception), "Could not obtain mro for " "old-style classes.") @test_utils.require_version(maxver='3.0') def test_mro_for_classes_with_old_style_in_mro(self): node = builder.extract_node(''' class Factory: pass class ClientFactory(Factory): pass class ReconnectingClientFactory(ClientFactory): pass class WebSocketAdapterFactory(object): pass class WebSocketClientFactory(WebSocketAdapterFactory, ClientFactory): pass class WampWebSocketClientFactory(WebSocketClientFactory): pass class RetryFactory(WampWebSocketClientFactory, ReconnectingClientFactory): pas ''') self.assertEqualMro( node, ['RetryFactory', 'WampWebSocketClientFactory', 'WebSocketClientFactory', 'WebSocketAdapterFactory', 'object', 'ReconnectingClientFactory', 'ClientFactory', 'Factory'] ) @test_utils.require_version(maxver='3.0') def test_combined_newstyle_oldstyle_in_mro(self): node = builder.extract_node(''' class Old: pass class New(object): pass class New1(object): pass class New2(New, New1): pass class NewOld(New2, Old): #@ pass ''') self.assertEqualMro(node, ['NewOld', 'New2', 'New', 'New1', 'object', 'Old']) self.assertTrue(node.newstyle) def test_with_metaclass_mro(self): astroid = builder.parse(""" import six class C(object): pass class B(C): pass class A(six.with_metaclass(type, B)): pass """) self.assertEqualMro(astroid['A'], ['A', 'B', 'C', 'object']) def test_mro(self): astroid = builder.parse(""" class C(object): pass class D(dict, C): pass class A1(object): pass class B1(A1): pass class C1(A1): pass class D1(B1, C1): pass class E1(C1, B1): pass class F1(D1, E1): pass class G1(E1, D1): pass class Boat(object): pass class DayBoat(Boat): pass class WheelBoat(Boat): pass class EngineLess(DayBoat): pass class SmallMultihull(DayBoat): pass class PedalWheelBoat(EngineLess, WheelBoat): pass class SmallCatamaran(SmallMultihull): pass class Pedalo(PedalWheelBoat, SmallCatamaran): pass class OuterA(object): class Inner(object): pass class OuterB(OuterA): class Inner(OuterA.Inner): pass class OuterC(OuterA): 
class Inner(OuterA.Inner): pass class OuterD(OuterC): class Inner(OuterC.Inner, OuterB.Inner): pass class Duplicates(str, str): pass """) self.assertEqualMro(astroid['D'], ['D', 'dict', 'C', 'object']) self.assertEqualMro(astroid['D1'], ['D1', 'B1', 'C1', 'A1', 'object']) self.assertEqualMro(astroid['E1'], ['E1', 'C1', 'B1', 'A1', 'object']) with self.assertRaises(InconsistentMroError) as cm: astroid['F1'].mro() A1 = astroid.getattr('A1')[0] B1 = astroid.getattr('B1')[0] C1 = astroid.getattr('C1')[0] object_ = builder.MANAGER.astroid_cache[BUILTINS].getattr('object')[0] self.assertEqual(cm.exception.mros, [[B1, C1, A1, object_], [C1, B1, A1, object_]]) with self.assertRaises(InconsistentMroError) as cm: astroid['G1'].mro() self.assertEqual(cm.exception.mros, [[C1, B1, A1, object_], [B1, C1, A1, object_]]) self.assertEqualMro( astroid['PedalWheelBoat'], ["PedalWheelBoat", "EngineLess", "DayBoat", "WheelBoat", "Boat", "object"]) self.assertEqualMro( astroid["SmallCatamaran"], ["SmallCatamaran", "SmallMultihull", "DayBoat", "Boat", "object"]) self.assertEqualMro( astroid["Pedalo"], ["Pedalo", "PedalWheelBoat", "EngineLess", "SmallCatamaran", "SmallMultihull", "DayBoat", "WheelBoat", "Boat", "object"]) self.assertEqualMro( astroid['OuterD']['Inner'], ['Inner', 'Inner', 'Inner', 'Inner', 'object']) with self.assertRaises(DuplicateBasesError) as cm: astroid['Duplicates'].mro() Duplicates = astroid.getattr('Duplicates')[0] self.assertEqual(cm.exception.cls, Duplicates) self.assertIsInstance(cm.exception, MroError) self.assertIsInstance(cm.exception, ResolveError) def test_mro_with_factories(self): cls = builder.extract_node(''' def MixinFactory(cls): mixin_name = '{}Mixin'.format(cls.__name__) mixin_bases = (object,) mixin_attrs = {} mixin = type(mixin_name, mixin_bases, mixin_attrs) return mixin class MixinA(MixinFactory(int)): pass class MixinB(MixinFactory(str)): pass class Base(object): pass class ClassA(MixinA, Base): pass class ClassB(MixinB, ClassA): pass class FinalClass(ClassB): def __init__(self): self.name = 'x' ''') self.assertEqualMro( cls, [ "FinalClass", "ClassB", "MixinB", "", "ClassA", "MixinA", "", "Base", "object" ] ) def test_generator_from_infer_call_result_parent(self): func = builder.extract_node(""" import contextlib @contextlib.contextmanager def test(): #@ yield """) result = next(func.infer_call_result()) self.assertIsInstance(result, Generator) self.assertEqual(result.parent, func) def test_type_three_arguments(self): classes = builder.extract_node(""" type('A', (object, ), {"a": 1, "b": 2, missing: 3}) #@ """) first = next(classes.infer()) self.assertIsInstance(first, nodes.ClassDef) self.assertEqual(first.name, "A") self.assertEqual(first.basenames, ["object"]) self.assertIsInstance(first["a"], nodes.Const) self.assertEqual(first["a"].value, 1) self.assertIsInstance(first["b"], nodes.Const) self.assertEqual(first["b"].value, 2) with self.assertRaises(AttributeInferenceError): first.getattr("missing") def test_implicit_metaclass(self): cls = builder.extract_node(""" class A(object): pass """) type_cls = scoped_nodes.builtin_lookup("type")[1][0] self.assertEqual(cls.implicit_metaclass(), type_cls) def test_implicit_metaclass_lookup(self): cls = builder.extract_node(''' class A(object): pass ''') instance = cls.instantiate_class() func = cls.getattr('mro') self.assertEqual(len(func), 1) self.assertRaises(AttributeInferenceError, instance.getattr, 'mro') def test_metaclass_lookup_using_same_class(self): # Check that we don't have recursive attribute access for metaclass 
cls = builder.extract_node(''' class A(object): pass ''') self.assertEqual(len(cls.getattr('mro')), 1) def test_metaclass_lookup_inferrence_errors(self): module = builder.parse(''' import six class Metaclass(type): foo = lala @six.add_metaclass(Metaclass) class B(object): pass ''') cls = module['B'] self.assertEqual(util.Uninferable, next(cls.igetattr('foo'))) def test_metaclass_lookup(self): module = builder.parse(''' import six class Metaclass(type): foo = 42 @classmethod def class_method(cls): pass def normal_method(cls): pass @property def meta_property(cls): return 42 @staticmethod def static(): pass @six.add_metaclass(Metaclass) class A(object): pass ''') acls = module['A'] normal_attr = next(acls.igetattr('foo')) self.assertIsInstance(normal_attr, nodes.Const) self.assertEqual(normal_attr.value, 42) class_method = next(acls.igetattr('class_method')) self.assertIsInstance(class_method, BoundMethod) self.assertEqual(class_method.bound, module['Metaclass']) normal_method = next(acls.igetattr('normal_method')) self.assertIsInstance(normal_method, BoundMethod) self.assertEqual(normal_method.bound, module['A']) # Attribute access for properties: # from the metaclass is a property object # from the class that uses the metaclass, the value # of the property property_meta = next(module['Metaclass'].igetattr('meta_property')) self.assertIsInstance(property_meta, UnboundMethod) wrapping = scoped_nodes.get_wrapping_class(property_meta) self.assertEqual(wrapping, module['Metaclass']) property_class = next(acls.igetattr('meta_property')) self.assertIsInstance(property_class, nodes.Const) self.assertEqual(property_class.value, 42) static = next(acls.igetattr('static')) self.assertIsInstance(static, scoped_nodes.FunctionDef) @test_utils.require_version(maxver='3.0') def test_implicit_metaclass_is_none(self): cls = builder.extract_node(""" class A: pass """) self.assertIsNone(cls.implicit_metaclass()) def test_local_attr_invalid_mro(self): cls = builder.extract_node(""" # A has an invalid MRO, local_attr should fallback # to using .ancestors. class A(object, object): test = 42 class B(A): #@ pass """) local = cls.local_attr('test')[0] inferred = next(local.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) def test_has_dynamic_getattr(self): module = builder.parse(""" class Getattr(object): def __getattr__(self, attrname): pass class Getattribute(object): def __getattribute__(self, attrname): pass class ParentGetattr(Getattr): pass """) self.assertTrue(module['Getattr'].has_dynamic_getattr()) self.assertTrue(module['Getattribute'].has_dynamic_getattr()) self.assertTrue(module['ParentGetattr'].has_dynamic_getattr()) # Test that objects analyzed through the live introspection # aren't considered to have dynamic getattr implemented. 
import datetime astroid_builder = builder.AstroidBuilder() module = astroid_builder.module_build(datetime) self.assertFalse(module['timedelta'].has_dynamic_getattr()) def test_duplicate_bases_namedtuple(self): module = builder.parse(""" import collections _A = collections.namedtuple('A', 'a') class A(_A): pass class B(A): pass """) names = ['B', 'A', 'A', 'tuple', 'object'] mro = module['B'].mro() class_names = [i.name for i in mro] self.assertEqual(names, class_names) def test_instance_bound_method_lambdas(self): ast_nodes = builder.extract_node(''' class Test(object): #@ lam = lambda self: self not_method = lambda xargs: xargs Test() #@ ''') cls = next(ast_nodes[0].infer()) self.assertIsInstance(next(cls.igetattr('lam')), scoped_nodes.Lambda) self.assertIsInstance(next(cls.igetattr('not_method')), scoped_nodes.Lambda) instance = next(ast_nodes[1].infer()) lam = next(instance.igetattr('lam')) self.assertIsInstance(lam, BoundMethod) not_method = next(instance.igetattr('not_method')) self.assertIsInstance(not_method, scoped_nodes.Lambda) def test_class_extra_decorators_frame_is_not_class(self): ast_node = builder.extract_node(''' def ala(): def bala(): #@ func = 42 ''') self.assertEqual(ast_node.extra_decorators, []) def test_class_extra_decorators_only_callfunc_are_considered(self): ast_node = builder.extract_node(''' class Ala(object): def func(self): #@ pass func = 42 ''') self.assertEqual(ast_node.extra_decorators, []) def test_class_extra_decorators_only_assignment_names_are_considered(self): ast_node = builder.extract_node(''' class Ala(object): def func(self): #@ pass def __init__(self): self.func = staticmethod(func) ''') self.assertEqual(ast_node.extra_decorators, []) def test_class_extra_decorators_only_same_name_considered(self): ast_node = builder.extract_node(''' class Ala(object): def func(self): #@ pass bala = staticmethod(func) ''') self.assertEqual(ast_node.extra_decorators, []) self.assertEqual(ast_node.type, 'method') def test_class_extra_decorators(self): static_method, clsmethod = builder.extract_node(''' class Ala(object): def static(self): #@ pass def class_method(self): #@ pass class_method = classmethod(class_method) static = staticmethod(static) ''') self.assertEqual(len(clsmethod.extra_decorators), 1) self.assertEqual(clsmethod.type, 'classmethod') self.assertEqual(len(static_method.extra_decorators), 1) self.assertEqual(static_method.type, 'staticmethod') def test_extra_decorators_only_class_level_assignments(self): node = builder.extract_node(''' def _bind(arg): return arg.bind class A(object): @property def bind(self): return 42 def irelevant(self): # This is important, because it used to trigger # a maximum recursion error. 
bind = _bind(self) return bind A() #@ ''') inferred = next(node.infer()) bind = next(inferred.igetattr('bind')) self.assertIsInstance(bind, nodes.Const) self.assertEqual(bind.value, 42) parent = bind.scope() self.assertEqual(len(parent.extra_decorators), 0) @test_utils.require_version(minver='3.0') def test_class_keywords(self): data = ''' class TestKlass(object, metaclass=TestMetaKlass, foo=42, bar='baz'): pass ''' astroid = builder.parse(data, __name__) cls = astroid['TestKlass'] self.assertEqual(len(cls.keywords), 2) self.assertEqual([x.arg for x in cls.keywords], ['foo', 'bar']) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_transforms.py0000644000076500000240000002004513324063433023250 0ustar claudiustaff00000000000000# Copyright (c) 2015-2017 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2016 Jakub Wilk # Copyright (c) 2018 Bryce Guinta # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER from __future__ import print_function import contextlib import time import unittest from astroid import builder from astroid import nodes from astroid import parse from astroid import transforms @contextlib.contextmanager def add_transform(manager, node, transform, predicate=None): manager.register_transform(node, transform, predicate) try: yield finally: manager.unregister_transform(node, transform, predicate) class TestTransforms(unittest.TestCase): def setUp(self): self.transformer = transforms.TransformVisitor() def parse_transform(self, code): module = parse(code, apply_transforms=False) return self.transformer.visit(module) def test_function_inlining_transform(self): def transform_call(node): # Let's do some function inlining inferred = next(node.infer()) return inferred self.transformer.register_transform(nodes.Call, transform_call) module = self.parse_transform(''' def test(): return 42 test() #@ ''') self.assertIsInstance(module.body[1], nodes.Expr) self.assertIsInstance(module.body[1].value, nodes.Const) self.assertEqual(module.body[1].value.value, 42) def test_recursive_transforms_into_astroid_fields(self): # Test that the transformer walks properly the tree # by going recursively into the _astroid_fields per each node. def transform_compare(node): # Let's check the values of the ops _, right = node.ops[0] # Assume they are Consts and they were transformed before # us. 
return nodes.const_factory(node.left.value < right.value) def transform_name(node): # Should be Consts return next(node.infer()) self.transformer.register_transform(nodes.Compare, transform_compare) self.transformer.register_transform(nodes.Name, transform_name) module = self.parse_transform(''' a = 42 b = 24 a < b ''') self.assertIsInstance(module.body[2], nodes.Expr) self.assertIsInstance(module.body[2].value, nodes.Const) self.assertFalse(module.body[2].value.value) def test_transform_patches_locals(self): def transform_function(node): assign = nodes.Assign() name = nodes.AssignName() name.name = 'value' assign.targets = [name] assign.value = nodes.const_factory(42) node.body.append(assign) self.transformer.register_transform(nodes.FunctionDef, transform_function) module = self.parse_transform(''' def test(): pass ''') func = module.body[0] self.assertEqual(len(func.body), 2) self.assertIsInstance(func.body[1], nodes.Assign) self.assertEqual(func.body[1].as_string(), 'value = 42') def test_predicates(self): def transform_call(node): inferred = next(node.infer()) return inferred def should_inline(node): return node.func.name.startswith('inlineme') self.transformer.register_transform(nodes.Call, transform_call, should_inline) module = self.parse_transform(''' def inlineme_1(): return 24 def dont_inline_me(): return 42 def inlineme_2(): return 2 inlineme_1() dont_inline_me() inlineme_2() ''') values = module.body[-3:] self.assertIsInstance(values[0], nodes.Expr) self.assertIsInstance(values[0].value, nodes.Const) self.assertEqual(values[0].value.value, 24) self.assertIsInstance(values[1], nodes.Expr) self.assertIsInstance(values[1].value, nodes.Call) self.assertIsInstance(values[2], nodes.Expr) self.assertIsInstance(values[2].value, nodes.Const) self.assertEqual(values[2].value.value, 2) def test_transforms_are_separated(self): # Test that the transforming is done at a separate # step, which means that we are not doing inference # on a partially constructed tree anymore, which was the # source of crashes in the past when certain inference rules # were used in a transform. def transform_function(node): if node.decorators: for decorator in node.decorators.nodes: inferred = next(decorator.infer()) if inferred.qname() == 'abc.abstractmethod': return next(node.infer_call_result()) return None manager = builder.MANAGER with add_transform(manager, nodes.FunctionDef, transform_function): module = builder.parse(''' import abc from abc import abstractmethod class A(object): @abc.abstractmethod def ala(self): return 24 @abstractmethod def bala(self): return 42 ''') cls = module['A'] ala = cls.body[0] bala = cls.body[1] self.assertIsInstance(ala, nodes.Const) self.assertEqual(ala.value, 24) self.assertIsInstance(bala, nodes.Const) self.assertEqual(bala.value, 42) def test_transforms_are_called_for_builtin_modules(self): # Test that transforms are called for builtin modules. 
def transform_function(node): name = nodes.AssignName() name.name = 'value' node.args.args = [name] return node manager = builder.MANAGER predicate = lambda node: node.root().name == 'time' with add_transform(manager, nodes.FunctionDef, transform_function, predicate): builder_instance = builder.AstroidBuilder() module = builder_instance.module_build(time) asctime = module['asctime'] self.assertEqual(len(asctime.args.args), 1) self.assertIsInstance(asctime.args.args[0], nodes.AssignName) self.assertEqual(asctime.args.args[0].name, 'value') def test_builder_apply_transforms(self): def transform_function(node): return nodes.const_factory(42) manager = builder.MANAGER with add_transform(manager, nodes.FunctionDef, transform_function): astroid_builder = builder.AstroidBuilder(apply_transforms=False) module = astroid_builder.string_build('''def test(): pass''') # The transform wasn't applied. self.assertIsInstance(module.body[0], nodes.FunctionDef) def test_transform_crashes_on_is_subtype_of(self): # Test that we don't crash when having is_subtype_of # in a transform, as per issue #188. This happened # before, when the transforms weren't in their own step. def transform_class(cls): if cls.is_subtype_of('django.db.models.base.Model'): return cls return cls self.transformer.register_transform(nodes.ClassDef, transform_class) self.parse_transform(''' # Change environ to automatically call putenv() if it exists import os putenv = os.putenv try: # This will fail if there's no putenv putenv except NameError: pass else: import UserDict ''') if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/tests/unittest_utils.py0000644000076500000240000000771413324063433022222 0ustar claudiustaff00000000000000# Copyright (c) 2008-2010, 2013 LOGILAB S.A. (Paris, FRANCE) # Copyright (c) 2014 Google, Inc. 
# Copyright (c) 2015-2016 Claudiu Popa # Copyright (c) 2016 Ceridwen # Copyright (c) 2016 Dave Baum # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import unittest from astroid import builder from astroid import InferenceError from astroid import nodes from astroid import node_classes from astroid import util as astroid_util class InferenceUtil(unittest.TestCase): def test_not_exclusive(self): module = builder.parse(""" x = 10 for x in range(5): print (x) if x > 0: print ('#' * x) """, __name__, __file__) xass1 = module.locals['x'][0] assert xass1.lineno == 2 xnames = [n for n in module.nodes_of_class(nodes.Name) if n.name == 'x'] assert len(xnames) == 3 assert xnames[1].lineno == 6 self.assertEqual(node_classes.are_exclusive(xass1, xnames[1]), False) self.assertEqual(node_classes.are_exclusive(xass1, xnames[2]), False) def test_if(self): module = builder.parse(''' if 1: a = 1 a = 2 elif 2: a = 12 a = 13 else: a = 3 a = 4 ''') a1 = module.locals['a'][0] a2 = module.locals['a'][1] a3 = module.locals['a'][2] a4 = module.locals['a'][3] a5 = module.locals['a'][4] a6 = module.locals['a'][5] self.assertEqual(node_classes.are_exclusive(a1, a2), False) self.assertEqual(node_classes.are_exclusive(a1, a3), True) self.assertEqual(node_classes.are_exclusive(a1, a5), True) self.assertEqual(node_classes.are_exclusive(a3, a5), True) self.assertEqual(node_classes.are_exclusive(a3, a4), False) self.assertEqual(node_classes.are_exclusive(a5, a6), False) def test_try_except(self): module = builder.parse(''' try: def exclusive_func2(): "docstring" except TypeError: def exclusive_func2(): "docstring" except: def exclusive_func2(): "docstring" else: def exclusive_func2(): "this one redefine the one defined line 42" ''') f1 = module.locals['exclusive_func2'][0] f2 = module.locals['exclusive_func2'][1] f3 = module.locals['exclusive_func2'][2] f4 = module.locals['exclusive_func2'][3] self.assertEqual(node_classes.are_exclusive(f1, f2), True) self.assertEqual(node_classes.are_exclusive(f1, f3), True) self.assertEqual(node_classes.are_exclusive(f1, f4), False) self.assertEqual(node_classes.are_exclusive(f2, f4), True) self.assertEqual(node_classes.are_exclusive(f3, f4), True) self.assertEqual(node_classes.are_exclusive(f3, f2), True) self.assertEqual(node_classes.are_exclusive(f2, f1), True) self.assertEqual(node_classes.are_exclusive(f4, f1), False) self.assertEqual(node_classes.are_exclusive(f4, f2), True) def test_unpack_infer_uninferable_nodes(self): node = builder.extract_node(''' x = [A] * 1 f = [x, [A] * 2] f ''') inferred = next(node.infer()) unpacked = list(node_classes.unpack_infer(inferred)) self.assertEqual(len(unpacked), 3) self.assertTrue(all(elt is astroid_util.Uninferable for elt in unpacked)) def test_unpack_infer_empty_tuple(self): node = builder.extract_node(''' () ''') inferred = next(node.infer()) with self.assertRaises(InferenceError): list(node_classes.unpack_infer(inferred)) if __name__ == '__main__': unittest.main() astroid-2.0.1/astroid/transforms.py0000644000076500000240000000611613324063433020152 0ustar claudiustaff00000000000000# Copyright (c) 2015-2016, 2018 Claudiu Popa # Copyright (c) 2016 Ceridwen # Copyright (c) 2018 Nick Drozd # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import collections class TransformVisitor: """A visitor for handling transforms. 
The standard approach of using it is to call :meth:`~visit` with an *astroid* module and the class will take care of the rest, walking the tree and running the transforms for each encountered node. """ def __init__(self): self.transforms = collections.defaultdict(list) def _transform(self, node): """Call matching transforms for the given node if any and return the transformed node. """ cls = node.__class__ if cls not in self.transforms: # no transform registered for this class of node return node transforms = self.transforms[cls] for transform_func, predicate in transforms: if predicate is None or predicate(node): ret = transform_func(node) # if the transformation function returns something, it's # expected to be a replacement for the node if ret is not None: node = ret if ret.__class__ != cls: # Can no longer apply the rest of the transforms. break return node def _visit(self, node): if hasattr(node, '_astroid_fields'): for field in node._astroid_fields: value = getattr(node, field) visited = self._visit_generic(value) setattr(node, field, visited) return self._transform(node) def _visit_generic(self, node): if isinstance(node, list): return [self._visit_generic(child) for child in node] if isinstance(node, tuple): return tuple(self._visit_generic(child) for child in node) return self._visit(node) def register_transform(self, node_class, transform, predicate=None): """Register `transform(node)` function to be applied on the given astroid's `node_class` if `predicate` is None or returns true when called with the node as argument. The transform function may return a value which is then used to substitute the original node in the tree. """ self.transforms[node_class].append((transform, predicate)) def unregister_transform(self, node_class, transform, predicate=None): """Unregister the given transform.""" self.transforms[node_class].remove((transform, predicate)) def visit(self, module): """Walk the given astroid *tree* and transform each encountered node Only the nodes which have transforms registered will actually be replaced or changed. """ module.body = [self._visit(child) for child in module.body] return self._transform(module) astroid-2.0.1/astroid/util.py0000644000076500000240000001134713324063433016733 0ustar claudiustaff00000000000000# Copyright (c) 2015-2018 Claudiu Popa # Copyright (c) 2015-2016 Ceridwen # Copyright (c) 2018 Bryce Guinta # Copyright (c) 2018 Nick Drozd # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER import warnings from itertools import islice import importlib import lazy_object_proxy def lazy_descriptor(obj): class DescriptorProxy(lazy_object_proxy.Proxy): def __get__(self, instance, owner=None): return self.__class__.__get__(self, instance) return DescriptorProxy(obj) def lazy_import(module_name): return lazy_object_proxy.Proxy( lambda: importlib.import_module('.' 
+ module_name, 'astroid')) @object.__new__ class Uninferable: """Special inference object, which is returned when inference fails.""" def __repr__(self): return 'Uninferable' __str__ = __repr__ def __getattribute__(self, name): if name == 'next': raise AttributeError('next method should not be called') if name.startswith('__') and name.endswith('__'): return object.__getattribute__(self, name) if name == 'accept': return object.__getattribute__(self, name) return self def __call__(self, *args, **kwargs): return self def __bool__(self): return False __nonzero__ = __bool__ def accept(self, visitor): func = getattr(visitor, "visit_uninferable") return func(self) class BadOperationMessage: """Object which describes a TypeError occurred somewhere in the inference chain This is not an exception, but a container object which holds the types and the error which occurred. """ class BadUnaryOperationMessage(BadOperationMessage): """Object which describes operational failures on UnaryOps.""" def __init__(self, operand, op, error): self.operand = operand self.op = op self.error = error @property def _object_type_helper(self): helpers = lazy_import('helpers') return helpers.object_type def _object_type(self, obj): # pylint: disable=not-callable; can't infer lazy_import objtype = self._object_type_helper(obj) if objtype is Uninferable: return None return objtype def __str__(self): if hasattr(self.operand, 'name'): operand_type = self.operand.name else: object_type = self._object_type(self.operand) if hasattr(object_type, 'name'): operand_type = object_type.name else: # Just fallback to as_string operand_type = object_type.as_string() msg = "bad operand type for unary {}: {}" return msg.format(self.op, operand_type) class BadBinaryOperationMessage(BadOperationMessage): """Object which describes type errors for BinOps.""" def __init__(self, left_type, op, right_type): self.left_type = left_type self.right_type = right_type self.op = op def __str__(self): msg = "unsupported operand type(s) for {}: {!r} and {!r}" return msg.format(self.op, self.left_type.name, self.right_type.name) def _instancecheck(cls, other): wrapped = cls.__wrapped__ other_cls = other.__class__ is_instance_of = wrapped is other_cls or issubclass(other_cls, wrapped) warnings.warn("%r is deprecated and slated for removal in astroid " "2.0, use %r instead" % (cls.__class__.__name__, wrapped.__name__), PendingDeprecationWarning, stacklevel=2) return is_instance_of def proxy_alias(alias_name, node_type): """Get a Proxy from the given name to the given node type.""" proxy = type(alias_name, (lazy_object_proxy.Proxy,), {'__class__': object.__dict__['__class__'], '__instancecheck__': _instancecheck}) return proxy(lambda: node_type) def limit_inference(iterator, size): """Limit inference amount. Limit inference amount to help with performance issues with exponentially exploding possible results. 
:param iterator: Inference generator to limit :type iterator: Iterator(NodeNG) :param size: Maximum amount of nodes yielded plus an Uninferable at the end if limit reached :type size: int :yields: A possibly modified generator :rtype param: Iterable """ yield from islice(iterator, size) has_more = next(iterator, False) if has_more is not False: yield Uninferable return # Backwards-compatibility aliases YES = Uninferable astroid-2.0.1/astroid.egg-info/0000755000076500000240000000000013324065077017076 5ustar claudiustaff00000000000000astroid-2.0.1/astroid.egg-info/dependency_links.txt0000644000076500000240000000000113324065077023144 0ustar claudiustaff00000000000000 astroid-2.0.1/astroid.egg-info/PKG-INFO0000644000076500000240000000645013324065077020200 0ustar claudiustaff00000000000000Metadata-Version: 1.2 Name: astroid Version: 2.0.1 Summary: An abstract syntax tree for Python with inference support. Home-page: https://github.com/PyCQA/astroid Author: Python Code Quality Authority Author-email: code-quality@python.org License: LGPL Description: Astroid ======= .. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master :target: https://travis-ci.org/PyCQA/astroid .. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true :alt: AppVeyor Build Status :target: https://ci.appveyor.com/project/PCManticore/astroid .. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master :target: https://coveralls.io/github/PyCQA/astroid?branch=master .. image:: https://readthedocs.org/projects/astroid/badge/?version=latest :target: http://astroid.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status What's this? ------------ The aim of this module is to provide a common base representation of python source code. It is currently powering pylint's capabilities. It provides a compatible representation which comes from the `_ast` module. It rebuilds the tree generated by the builtin _ast module by recursively walking down the AST and building an extended ast. The new node classes have additional methods and attributes for different usages. They include some support for static inference and local name scopes. Furthermore, astroid can also build partial trees by inspecting living objects. Installation ------------ Extract the tarball, jump into the created directory and run:: pip install . If you want to do an editable installation, you can run:: pip install -e . If you have any questions, please mail the code-quality@python.org mailing list for support. See http://mail.python.org/mailman/listinfo/code-quality for subscription information and archives. Python Versions --------------- astroid 2.0 is currently available for Python 3 only. If you want Python 2 support, older versions of astroid will still be supported until 2020. Test ---- Tests are in the 'test' subdirectory.
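The bundled tests exercise the same public helpers described above. As a rough, minimal sketch of the ``extract_node``/``infer`` pattern they rely on (assuming only the installed ``astroid`` package; the ``answer`` function is purely illustrative)::

    import astroid

    # the #@ marker selects the statement whose node extract_node returns
    node = astroid.extract_node('''
    def answer():
        return 21 * 2
    answer()  #@
    ''')

    inferred = next(node.infer())   # inference follows the call into answer()
    print(inferred.value)           # 42, yielded as a Const node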
To launch the whole tests suite, you can use either `tox` or `pytest`:: tox pytest astroid Platform: UNKNOWN Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Topic :: Software Development :: Quality Assurance Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Requires-Python: >=3.4.* astroid-2.0.1/astroid.egg-info/requires.txt0000644000076500000240000000021013324065077021467 0ustar claudiustaff00000000000000lazy_object_proxy six wrapt [:python_version < "3.5"] typing [:python_version < "3.7" and implementation_name == "cpython"] typed_ast astroid-2.0.1/astroid.egg-info/SOURCES.txt0000644000076500000240000002244113324065077020765 0ustar claudiustaff00000000000000COPYING COPYING.LESSER ChangeLog MANIFEST.in README.rst pytest.ini setup.cfg setup.py astroid/__init__.py astroid/__pkginfo__.py astroid/_ast.py astroid/arguments.py astroid/as_string.py astroid/bases.py astroid/builder.py astroid/context.py astroid/decorators.py astroid/exceptions.py astroid/helpers.py astroid/inference.py astroid/manager.py astroid/mixins.py astroid/modutils.py astroid/node_classes.py astroid/nodes.py astroid/objects.py astroid/protocols.py astroid/raw_building.py astroid/rebuilder.py astroid/scoped_nodes.py astroid/test_utils.py astroid/transforms.py astroid/util.py astroid.egg-info/PKG-INFO astroid.egg-info/SOURCES.txt astroid.egg-info/dependency_links.txt astroid.egg-info/requires.txt astroid.egg-info/top_level.txt astroid/brain/brain_attrs.py astroid/brain/brain_builtin_inference.py astroid/brain/brain_collections.py astroid/brain/brain_curses.py astroid/brain/brain_dateutil.py astroid/brain/brain_fstrings.py astroid/brain/brain_functools.py astroid/brain/brain_gi.py astroid/brain/brain_hashlib.py astroid/brain/brain_io.py astroid/brain/brain_mechanize.py astroid/brain/brain_multiprocessing.py astroid/brain/brain_namedtuple_enum.py astroid/brain/brain_nose.py astroid/brain/brain_numpy.py astroid/brain/brain_pkg_resources.py astroid/brain/brain_pytest.py astroid/brain/brain_qt.py astroid/brain/brain_random.py astroid/brain/brain_re.py astroid/brain/brain_six.py astroid/brain/brain_ssl.py astroid/brain/brain_subprocess.py astroid/brain/brain_threading.py astroid/brain/brain_typing.py astroid/brain/brain_uuid.py astroid/interpreter/__init__.py astroid/interpreter/dunder_lookup.py astroid/interpreter/objectmodel.py astroid/interpreter/_import/__init__.py astroid/interpreter/_import/spec.py astroid/interpreter/_import/util.py astroid/tests/__init__.py astroid/tests/resources.py astroid/tests/unittest_brain.py astroid/tests/unittest_brain_numpy.py astroid/tests/unittest_builder.py astroid/tests/unittest_helpers.py astroid/tests/unittest_inference.py astroid/tests/unittest_lookup.py astroid/tests/unittest_manager.py astroid/tests/unittest_modutils.py astroid/tests/unittest_nodes.py astroid/tests/unittest_object_model.py astroid/tests/unittest_objects.py astroid/tests/unittest_protocols.py astroid/tests/unittest_python3.py astroid/tests/unittest_raw_building.py astroid/tests/unittest_regrtest.py astroid/tests/unittest_scoped_nodes.py astroid/tests/unittest_transforms.py astroid/tests/unittest_utils.py 
astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.zip astroid/tests/testdata/python2/data/__init__.py astroid/tests/testdata/python2/data/absimport.py astroid/tests/testdata/python2/data/all.py astroid/tests/testdata/python2/data/descriptor_crash.py astroid/tests/testdata/python2/data/email.py astroid/tests/testdata/python2/data/foogle_fax-0.12.5-py2.7-nspkg.pth astroid/tests/testdata/python2/data/format.py astroid/tests/testdata/python2/data/invalid_encoding.py astroid/tests/testdata/python2/data/module.py astroid/tests/testdata/python2/data/module2.py astroid/tests/testdata/python2/data/noendingnewline.py astroid/tests/testdata/python2/data/nonregr.py astroid/tests/testdata/python2/data/notall.py astroid/tests/testdata/python2/data/operator_precedence.py astroid/tests/testdata/python2/data/recursion.py astroid/tests/testdata/python2/data/tmp__init__.py astroid/tests/testdata/python2/data/SSL1/Connection1.py astroid/tests/testdata/python2/data/SSL1/__init__.py astroid/tests/testdata/python2/data/absimp/__init__.py astroid/tests/testdata/python2/data/absimp/string.py astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py astroid/tests/testdata/python2/data/appl/__init__.py astroid/tests/testdata/python2/data/appl/myConnection.py astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.py astroid/tests/testdata/python2/data/find_test/__init__.py astroid/tests/testdata/python2/data/find_test/module.py astroid/tests/testdata/python2/data/find_test/module2.py astroid/tests/testdata/python2/data/find_test/noendingnewline.py astroid/tests/testdata/python2/data/find_test/nonregr.py astroid/tests/testdata/python2/data/foogle/fax/__init__.py astroid/tests/testdata/python2/data/foogle/fax/a.py astroid/tests/testdata/python2/data/lmfp/__init__.py astroid/tests/testdata/python2/data/lmfp/foo.py astroid/tests/testdata/python2/data/module1abs/__init__.py astroid/tests/testdata/python2/data/module1abs/core.py astroid/tests/testdata/python2/data/namespace_pep_420/module.py astroid/tests/testdata/python2/data/notamodule/file.py astroid/tests/testdata/python2/data/package/__init__.py astroid/tests/testdata/python2/data/package/absimport.py astroid/tests/testdata/python2/data/package/hello.py astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py astroid/tests/testdata/python2/data/package/subpackage/__init__.py astroid/tests/testdata/python2/data/package/subpackage/module.py astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__init__.py astroid/tests/testdata/python2/data/path_pkg_resources_1/package/foo.py astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__init__.py astroid/tests/testdata/python2/data/path_pkg_resources_2/package/bar.py astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__init__.py astroid/tests/testdata/python2/data/path_pkg_resources_3/package/baz.py astroid/tests/testdata/python2/data/path_pkgutil_1/package/__init__.py astroid/tests/testdata/python2/data/path_pkgutil_1/package/foo.py astroid/tests/testdata/python2/data/path_pkgutil_2/package/__init__.py astroid/tests/testdata/python2/data/path_pkgutil_2/package/bar.py astroid/tests/testdata/python2/data/path_pkgutil_3/package/__init__.py astroid/tests/testdata/python2/data/path_pkgutil_3/package/baz.py astroid/tests/testdata/python2/data/unicode_package/__init__.py astroid/tests/testdata/python2/data/unicode_package/core/__init__.py 
astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.zip astroid/tests/testdata/python3/data/__init__.py astroid/tests/testdata/python3/data/absimport.py astroid/tests/testdata/python3/data/all.py astroid/tests/testdata/python3/data/descriptor_crash.py astroid/tests/testdata/python3/data/email.py astroid/tests/testdata/python3/data/foogle_fax-0.12.5-py2.7-nspkg.pth astroid/tests/testdata/python3/data/format.py astroid/tests/testdata/python3/data/invalid_encoding.py astroid/tests/testdata/python3/data/module.py astroid/tests/testdata/python3/data/module2.py astroid/tests/testdata/python3/data/noendingnewline.py astroid/tests/testdata/python3/data/nonregr.py astroid/tests/testdata/python3/data/notall.py astroid/tests/testdata/python3/data/operator_precedence.py astroid/tests/testdata/python3/data/recursion.py astroid/tests/testdata/python3/data/tmp__init__.py astroid/tests/testdata/python3/data/SSL1/Connection1.py astroid/tests/testdata/python3/data/SSL1/__init__.py astroid/tests/testdata/python3/data/absimp/__init__.py astroid/tests/testdata/python3/data/absimp/string.py astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py astroid/tests/testdata/python3/data/appl/__init__.py astroid/tests/testdata/python3/data/appl/myConnection.py astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/submodule.py astroid/tests/testdata/python3/data/find_test/__init__.py astroid/tests/testdata/python3/data/find_test/module.py astroid/tests/testdata/python3/data/find_test/module2.py astroid/tests/testdata/python3/data/find_test/noendingnewline.py astroid/tests/testdata/python3/data/find_test/nonregr.py astroid/tests/testdata/python3/data/foogle/fax/__init__.py astroid/tests/testdata/python3/data/foogle/fax/a.py astroid/tests/testdata/python3/data/lmfp/__init__.py astroid/tests/testdata/python3/data/lmfp/foo.py astroid/tests/testdata/python3/data/module1abs/__init__.py astroid/tests/testdata/python3/data/module1abs/core.py astroid/tests/testdata/python3/data/namespace_pep_420/module.py astroid/tests/testdata/python3/data/notamodule/file.py astroid/tests/testdata/python3/data/package/__init__.py astroid/tests/testdata/python3/data/package/absimport.py astroid/tests/testdata/python3/data/package/hello.py astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py astroid/tests/testdata/python3/data/package/subpackage/__init__.py astroid/tests/testdata/python3/data/package/subpackage/module.py astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__init__.py astroid/tests/testdata/python3/data/path_pkg_resources_1/package/foo.py astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__init__.py astroid/tests/testdata/python3/data/path_pkg_resources_2/package/bar.py astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__init__.py astroid/tests/testdata/python3/data/path_pkg_resources_3/package/baz.py astroid/tests/testdata/python3/data/path_pkgutil_1/package/__init__.py astroid/tests/testdata/python3/data/path_pkgutil_1/package/foo.py astroid/tests/testdata/python3/data/path_pkgutil_2/package/__init__.py astroid/tests/testdata/python3/data/path_pkgutil_2/package/bar.py astroid/tests/testdata/python3/data/path_pkgutil_3/package/__init__.py astroid/tests/testdata/python3/data/path_pkgutil_3/package/baz.py astroid/tests/testdata/python3/data/unicode_package/__init__.py 
astroid/tests/testdata/python3/data/unicode_package/core/__init__.pyastroid-2.0.1/astroid.egg-info/top_level.txt0000644000076500000240000000001013324065077021617 0ustar claudiustaff00000000000000astroid astroid-2.0.1/ChangeLog0000644000076500000240000016360413324063457015523 0ustar claudiustaff00000000000000=================== astroid's ChangeLog =================== What's New in astroid 2.0.1? ========================== Release Date: 2018-07-19 * Released to clear an old wheel package on PyPI What's New in astroid 2.0? ========================== Release Date: 2018-07-15 * String representation of nodes takes in account precedence and associativity rules of operators. * Fix loading files with `modutils.load_from_module` when the path that contains it in `sys.path` is a symlink and the file is contained in a symlinked folder. Close #583 * Reworking of the numpy brain dealing with numerictypes (use of inspect module to determine the class hierarchy of numpy.core.numerictypes module) Close PyCQA/pylint#2140 * Added inference support for starred nodes in for loops Close #146 * Support unpacking for dicts in assignments Close #268 * Add support for inferring functools.partial Close #125 * Inference support for `dict.fromkeys` Close #110 * `int()` builtin is inferred as returning integers. Close #150 * `str()` builtin is inferred as returning strings. Close #148 * DescriptorBoundMethod has the correct number of arguments defined. * Improvement of the numpy numeric types definition. Close PyCQA/pylint#1971 * Subclasses of *property* are now interpreted as properties Close PyCQA/pylint#1601 * AsStringRegexpPredicate has been removed. Use transform predicates instead of it. * Switched to using typed_ast for getting access to type comments As a side effect of this change, some nodes gained a new `type_annotation` attribute, which, if the type comments were correctly parsed, should contain a node object with the corresponding objects from the type comment. * typing.X[...] and typing.NewType are inferred as classes instead of instances. * Module.__path__ is now a list It used to be a string containing the path, but it doesn't reflect the situation on Python, where it is actually a list. * Fix a bug with namespace package's __path__ attribute. Close #528 * Added brain tips for random.sample Part of PyCQA/pylint#811 * Add brain tip for `issubclass` builtin Close #101. * Fix submodule imports from six Close PyCQA/pylint#1640 * Fix missing __module__ and __qualname__ from class definition locals Close PYCQA/pylint#1753 * Fix a crash when __annotations__ access a parent's __init__ that does not have arguments Close #473 * Fix multiple objects sharing the same InferenceContext.path causing uninferable results Close #483 * Fix improper modification of col_offset, lineno upon inference of builtin functions Close PyCQA/pylint#1839 * Subprocess.Popen brain now knows of the args member Close PyCQA/pylint#1860 * add move_to_end method to collections.OrderedDict brain Close PyCQA/pylint#1872 * Include new hashlib classes added in python 3.6 * Fix RecursionError for augmented assign Close #437, #447, #313, PyCQA/pylint#1642, PyCQA/pylint#1805, PyCQA/pylint#1854, PyCQA/pylint#1452 * Add missing attrs special attribute Close PyCQA/pylint#1884 * Inference now understands the 'isinstance' builtin Close #98 * Stop duplicate nodes with the same key values from appearing in dictionaries from dictionary unpacking. 
Close PyCQA/pylint#1843 * Fix ``contextlib.contextmanager`` inference for nested context managers Close #1699 * Implement inference for len builtin Close #112 * Add qname method to Super object preventing potential errors in upstream pylint Close #533 * Stop astroid from getting stuck in an infinite loop if a function shares its name with its decorator Close #375 * Fix issue with inherited __call__ improperly inferring self Close PyCQA/pylint#2199 * Fix __call__ precedence for classes with custom metaclasses Close PyCQA/pylint#2159 * Limit the maximum amount of iterable results in a NodeNG.infer() call to 100 by default for performance issues with variables with large amounts of possible values. The max inferable value can be tuned by setting the `max_inferable_values` flag on astroid.MANAGER. What's New in astroid 1.6.0? ============================ Release Date: 2017-12-15 * When verifying duplicate classes in MRO, ignore on-the-fly generated classes Close PyCQA/pylint#1706 * Add brain tip for attrs library to prevent unsupported-assignment-operation false positives Close PyCQA/pylint#1698 * file_stream was removed, since it was deprecated for three releases. Instead one should use the .stream() method. * Vast improvements to numpy support * Add brain tips for curses Close PyCQA/pylint#1703 * Add brain tips for UUID.int Close PyCQA/pylint#961 * The result of using object.__new__ as class decorator is correctly inferred as instance Close #172 * Enums created with functional syntax are now iterable * Enums created with functional syntax are now subscriptable * Don't crash when getting the string representation of BadUnaryOperationMessage In some cases, when the operand does not have a .name attribute, getting the string representation of a BadUnaryOperationMessage leads to a crash. Close PyCQA/pylint#1563 * Don't raise DuplicateBaseError when classes at different locations are used For instance, one can implement a namedtuple base class, which gets reused on a class with the same name later on in the file. Until now, we considered these two classes as being the same, because they shared the name, but in fact they are different, being created at different locations and through different means. Close PyCQA/pylint#1458 * The func form of namedtuples with keywords is now understood Close PyCQA/pylint#1530 * Fix inference for nested calls * Dunder class at method level is now inferred as the class of the method Close PyCQA/pylint#1328 * Stop most inference tip overwrites from happening by using predicates on existing inference_tip transforms. Close #472 * Fix object.__new__(cls) calls in classmethods by using a context which has the proper boundnode for the given argument Close #404 * Fix Pathlib type inference Close PyCQA/pylint#224 Close PyCQA/pylint#1660 What's New in astroid 1.5.2?
============================ Release Date: 2017-04-17 * Basic support for the class form of typing.NamedTuple * mro() can be computed for classes with old style classes in the hierarchy What's New in astroid 1.5.0? ============================ Release Date: 2017-04-13 * Arguments node gained a new attribute, ``kwonlyargs_annotations`` This new attribute holds the annotations for the keyword-only arguments. * `namedtuple` inference now understands `rename` keyword argument * Classes can now know their definition-time arguments. Classes can support keyword arguments, which are passed when a class is constructed using ``__new__``. * Add support for inferring typing.NamedTuple. * ClassDef now supports __getitem__ inference through the metaclass. * getitem() method accepts nodes now, instead of Python objects. * Add support for explicit namespace packages, created with pkg_resources. * Add brain tips for _io.TextIOWrapper's buffer and raw attributes. * Add `returns` into the proper order in FunctionDef._astroid_fields The order is important, since it determines the last child, which in turn determines the last line number of a scoped node. * Add brain tips for functools.lru_cache. * New function, astroid.extract_node, exported out from astroid.test_utils. * Stop saving assignment locals in ExceptHandlers, when the context is a store. This fixes a tripping case, where the RHS of a ExceptHandler can be redefined by the LHS, leading to a local save. For instance, ``except KeyError, exceptions.IndexError`` could result in a local save for IndexError as KeyError, resulting in potential unexpected inferences. Since we don't lose a lot, this syntax gets prohibited. * Fix a crash which occurred when the class of a namedtuple could not be inferred. * Add support for implicit namespace packages (PEP 420) This change involves a couple of modifications. First, we're relying on a spec finder protocol, inspired by importlib's ModuleSpec, for finding where a file or package is, using importlib's PathFinder as well, which enable us to discover namespace packages as well. This discovery is the center piece of the namespace package support, the other part being the construction of a dummy Module node whenever a namespace package root directory is requested during astroid's import references. * Introduce a special attributes model Through this model, astroid starts knowing special attributes of certain Python objects, such as functions, classes, super objects and so on. This was previously possible before, but now the lookup and the attributes themselves are separated into a new module, objectmodel.py, which describes, in a more comprehensive way, the data model of each object. * Exceptions have their own object model Some of exceptions's attributes, such as .args and .message, can't be inferred correctly since they are descriptors that get transformed into the proper objects at runtime. This can cause issues with the static analysis, since they are inferred as different than what's expected. Now when we're creating instances of exceptions, we're inferring a special object that knows how to transform those runtime attributes into the proper objects via a custom object model. Closes issue #81 * dict.values, dict.keys and dict.items are properly inferred to their corresponding type, which also includes the proper containers for Python 3. 
* Fix a crash which occurred when a method had a same name as a builtin object, decorated at the same time by that builtin object ( a property for instance) * The inference can handle the case where the attribute is accessed through a subclass of a base class and the attribute is defined at the base class's level, by taking in consideration a redefinition in the subclass. This should fix https://github.com/PyCQA/pylint/issues/432 * Calling lambda methods (defined at class level) can be understood. * Don't take in consideration invalid assignments, especially when __slots__ declaration forbids them. Close issue #332 * Functional form of enums support accessing values through __call__. * Brain tips for the ssl library. * decoratornames() does not leak InferenceError anymore. * wildcard_imported_names() got replaced by _public_names() Our understanding of wildcard imports through __all__ was half baked to say at least, since we couldn't account for modifications of the list, which results in tons of false positives. Instead, we replaced it with _public_names(), a method which returns all the names that are publicly available in a module, that is that don't start with an underscore, even though this means that there is a possibility for other names to be leaked out even though they are not present in the __all__ variable. The method is private in 1.4.X. * unpack_infer raises InferenceError if it can't operate with the given sequences of nodes. * Support accessing properties with super(). * Enforce strong updates per frames. When looking up a name in a scope, Scope.lookup will return only the values which will be reachable after execution, as seen in the following code: a = 1 a = 2 In this case it doesn't make sense to return two values, but only the last one. * Add support for inference on threading.Lock As a matter of fact, astroid can infer on threading.RLock, threading.Semaphore, but can't do it on threading.Lock (because it comes from an extension module). * pkg_resources brain tips are a bit more specific, by specifying proper returns. * The slots() method conflates all the slots from the ancestors into a list of current and parent slots. We're doing this because this is the right semantics of slots, they get inherited, as long as each parent defines a __slots__ entry. * Some nodes got a new attribute, 'ctx', which tells in which context the said node was used. The possible values for the contexts are `Load` ('a'), `Del` ('del a'), `Store` ('a = 4') and the nodes that got the new attribute are Starred, Subscript, List and Tuple. Closes issue #267. * relative_to_absolute_name or methods calling it will now raise TooManyLevelsError when a relative import was trying to access something beyond the top-level package. * AstroidBuildingException is now AstroidBuildingError. The first name will exist until astroid 2.0. * Add two new exceptions, AstroidImportError and AstroidSyntaxError. They are subclasses of AstroidBuildingException and are raised when a module can't be imported from various reasons. Also do_import_module lets the errors to bubble up without converting them to InferenceError. This particular conversion happens only during the inference. * Revert to using printf-style formatting in as_string, in order to avoid a potential problem with encodings when using .format. Closes issue #273. Patch by notsqrt. * assigned_stmts methods have the same signature from now on. 
  They used to have different signatures and each one made assumptions about
  what could be passed to other implementations, leading to various possible
  crashes when one or more arguments weren't given. Closes issue #277.

* Fix metaclass detection when multiple keyword arguments are used in a
  class definition.

* Add support for annotated variable assignments (PEP 526).

* Starred expressions are now inferred correctly for tuple, list, set, and
  dictionary literals.

* Support for asynchronous comprehensions introduced in Python 3.6.

  Fixes #399. See PEP 530 for details.


What's New in astroid 1.4.1?
============================
Release Date: 2015-11-29

* Add support for handling Uninferable nodes when calling as_string.

  Some objects, for instance List or Tuple, can have Uninferable as their
  elements after inference, which happens when their components couldn't be
  inferred properly. This means that as_string needs to cope with
  Uninferable nodes appearing among the nodes it is asked to transform into
  a string. The patch adds a visit method in AsString and ``accept`` on
  Yes / Uninferable nodes. Closes issue #270.


What's New in astroid 1.4.0?
============================
Release Date: 2015-11-29

* Class.getattr('__mro__') returns the actual MRO. Closes issue #128.

* The logilab-common dependency is not needed anymore as the needed code was
  integrated into astroid.

* Generated enum member stubs now support IntEnum and multiple base classes.

* astroid.builder.AstroidBuilder.string_build and
  astroid.builder.AstroidBuilder.file_build now raise
  AstroidBuildingException when parsing the string raises a SyntaxError.

* Add brain tips for multiprocessing.Manager and
  multiprocessing.managers.SyncManager.

* Add some fixes which enhance the Jython support.

  The fix mostly includes updates to modutils, which is modified in order to
  properly look up paths from live objects, which end in $py.class, not .pyc
  as for Python 2. Closes issue #83.

* The Generator objects inferred with `infer_call_result` from functions
  have as their parent the function from which they are returned.

* Add brain tips for multiprocessing on Python 3.4+, where the module level
  functions are retrieved with getattr from a context object, leading to
  many no-member errors in Pylint.

* Partially understand the 3-argument form of `type`. The only change is
  that astroid understands members passed in as dictionaries as the third
  argument.

* .slots() will return an empty list for classes with empty slots.
  Previously it returned None, which is the same value as for classes
  without slots at all. This was changed in order to better reflect what's
  actually happening.

* Improve the inference of Getattr nodes when dealing with abstract
  properties from the abc module.

  In astroid.bases.Instance._wrap_attr we had detection code for properties,
  which basically inferred whatever a property returned, passing the results
  up the stack, to the igetattr() method. It handled only the builtin
  property, but the new patch also handles a couple of other properties,
  such as abc.abstractproperty.

* UnboundMethod.getattr calls the getattr of its _proxied object and doesn't
  call super(...) anymore.

  It previously crashed, since the first ancestor in its mro was bases.Proxy
  and bases.Proxy doesn't implement the .getattr method. Closes issue #91.

* Don't hard fail when calling .mro() on a class which combines both
  newstyle and old style classes. The class in question is actually newstyle
  (and the __mro__ can be retrieved using Python).
  .mro() falls back to using .ancestors() in that case.

* Class.local_attr and Class.local_attr_ancestors internally use an MRO
  lookup, using the .mro() method, if they can.

  That means that for newstyle classes, when trying to look up a member
  using one of these functions, the first one according to the MRO will be
  returned. This nicely reflects reality, but it can have as a drawback the
  fact that it is a behaviour change (the previous behaviour was incorrect
  though). Also, having bases which can return multiple values when inferred
  will not work with the new approach, because .mro() only retrieves the
  first value inferred from a base.

* Expose an implicit_metaclass() method in Class. This will return a
  builtins.type instance for newstyle classes.

* Add two new exceptions for handling MRO error cases.

  DuplicateBasesError is emitted when duplicate bases are found in a class,
  while InconsistentMroError is raised when the method resolution is
  determined to be inconsistent. They share a common class, MroError, which
  is a subclass of ResolveError, meaning that this change is backwards
  compatible.

* Classes aren't marked as interfaces anymore, in the `type` attribute.

* Class.has_dynamic_getattr doesn't return True for special methods which
  aren't implemented in pure Python, as is the case for extension modules.

  Since the methods most likely came from a live object, all of them will
  have __getattr__ and __getattribute__ present, and it is wrong to consider
  that those methods were actually implemented.

* Add basic support for understanding context managers.

  Until now, there was no way to understand what __enter__ returns in a
  context manager and what gets bound using the ``as`` keyword. With these
  changes, we can understand ``bar`` in ``with foo() as bar``, which will be
  the result of __enter__.

* Add a new type of node, called *inference objects*.

  Inference objects are similar to AST nodes, but they can be obtained only
  after inference, so they can't be found inside the original AST tree.
  Their purpose is to handle at the astroid level some operations which
  can't be handled when using brain transforms. For instance, the first
  object added is FrozenSet, which can be manipulated at astroid's level
  (inferred, iterated etc.). Code such as 'frozenset((1,2))' will no longer
  return an Instance of frozenset without access to its content, but a new
  objects.FrozenSet, which can be used just like a nodes.Set.

* Add a new *inference object* called Super, which also adds support for
  understanding super calls.

  astroid understands the zero-argument form of super, specific to Python 3,
  where the interpreter fills in the arguments of the call itself. We also
  understand the 2-argument form of super, both for bound lookups
  (super(X, instance)) and for unbound lookups (super(X, Y)), with support
  for validating that the object-or-type is a subtype of the first argument.
  The unbound form of super (one argument) is not understood, since it's
  useless in practice and should be removed from Python's specification.
  Closes issue #89.

* Add inference support for the getattr builtin. Now getattr calls are
  properly understood. Closes issue #103.

* Add inference support for the hasattr builtin. Closes issue #102.

* Add the 'assert_equals' method in nose.tools's brain plugin.

* Don't leak StopIteration when inferring invalid UnaryOps (+[], +None etc.).

* Improve the inference of unary operands.
  When inferring unary operands, astroid looks up the return value of
  __pos__, __neg__ and __invert__ to determine the inferred value of
  ``~node``, ``+node`` or ``-node``.

* Improve the inference of six.moves, especially when using
  `from ... import ...` syntax. Also, we added a new failed import hook for
  six.moves, which fixes the import-error false positive from pylint.
  Closes issue #107.

* Take the first steps towards detecting type errors for unary and binary
  operations.

  In exceptions, a new object was added for holding information about a
  possible UnaryOp TypeError, called `UnaryOperationError`. Even though the
  name suggests it's an exception, it's actually not one. When inferring
  UnaryOps, we use this special object to mark a possible TypeError, an
  object which can be interpreted by pylint in order to emit a new warning.
  We are also exposing a new method for UnaryOps, called `type_errors`,
  which returns a list of UnaryOperationError objects.

* A new method was added to the AST nodes, 'bool_value'.

  It is used to deduce the value of a node when used in a boolean context,
  which is useful both for inference and for data flow analysis, where we
  are interested in which branches will be followed when the program is
  executed. `bool_value` returns True, False or YES if the node's boolean
  value can't be deduced. The method is used when inferring the unary
  operator `not`. Thus, `not something` will result in calling
  `something.bool_value` and negating the result, if it is a boolean.

* Add inference support for boolean operations (`and` and `or`).

* Add inference support for the builtin `callable`.

* astroid.inspector was moved to pylint.pyreverse, since pyreverse is the
  only known client of this module. No other change was made to the exported
  API.

* astroid.utils.ASTWalker and astroid.utils.LocalsVisitor were moved to
  pylint.pyreverse.utils.

* Add inference support for the builtin `bool`.

* Add `igetattr` method to scoped_nodes.Function.

* Add support for Python 3.5's MatMul operation: see PEP 465 for more
  details.

* NotImplemented is now properly detected as being part of the builtins
  module. Previously, trying to infer the Name(NotImplemented) returned a
  YES object.

* Add astroid.helpers, a module of various useful utilities which don't yet
  belong in other components. Added *object_type*, a function which can be
  used to obtain the type of almost any astroid object, similar to how the
  builtin *type* works.

* Understand the one-argument form of the builtin *type*. This uses the
  recently added *astroid.helpers.object_type* in order to retrieve the
  Python type of the first argument of the call.

* Add helpers.is_supertype and helpers.is_subtype, two functions for
  checking if an object is a super/sub type of another.

* Improve the inference of binary arithmetic operations (normal and
  augmented).

* Add support for retrieving TypeErrors for binary arithmetic operations.

  The change is similar to what was added for UnaryOps: a new method called
  *type_errors* for both AugAssign and BinOp, which can be used to retrieve
  type errors that occurred during inference. Also, a new exception object
  was added, BinaryOperationError.

* Lambdas found at class level, which have a `self` argument, are considered
  BoundMethods when accessed from instances of their class.

* Add support for multiplication of tuples and lists with instances which
  provide an __index__ method returning an int.

* Add support for indexing containers with instances which provide an
  __index__ method returning an int.
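The `bool_value` method and the *object_type* helper mentioned above can be
exercised as follows (a minimal sketch; the output shown in the comments is
indicative only):

    import astroid
    from astroid import helpers

    node = astroid.extract_node('42 + 1  #@')
    inferred = next(node.infer())

    # object_type() mirrors the builtin type(): for the inferred constant it
    # should resolve to the ClassDef of int.
    print(helpers.object_type(inferred))

    # bool_value() deduces the truth value of a node when it can.
    print(inferred.bool_value())  # True for a non-zero constant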
* Star unpacking in assignments now properly returns a list, not the
  individual components. Closes issue #138.

* Add annotation support for function.as_string(). Closes issue #37.

* Add support for indexing bytes on Python 3.

* Add support for inferring subscript on instances, which will use
  __getitem__. Closes issue #124.

* Add support for pkg_resources.declare_namespaces.

* Move pyreverse specific modules and functionality back into pyreverse
  (astroid.manager.Project, astroid.manager.Manager.project_from_files).

* Understand metaclasses added with the six.add_metaclass decorator.
  Closes issue #129.

* Add a new convenience API, `astroid.parse`, which can be used to retrieve
  an astroid AST from a source code string, similar to how ast.parse can be
  used to obtain a Python AST from a source string. This is
  test_utils.build_module promoted to a public API.

* do_import_module passes the proper relative_only flag if the level is
  higher than 1.

  This has the side effect that using `from .something import something` in
  a non-package will finally result in an import-error on Pylint's side.
  Until now relative_only was ignored, leading to the import of `something`
  if it was globally available.

* Add get_wrapping_class API to scoped_nodes, which can be used to retrieve
  the class that wraps a node.

* Class.getattr looks by default in the implicit and the explicit
  metaclasses, which on Python 3 means `type`. Closes issue #114.

* There's a new separate step for transforms.

  Until now, the transforms were applied at the same time the tree was being
  built. This was problematic if the transform functions were using
  inference, since the inference was executed on a partially constructed
  tree, which led to failures when post-building information was needed
  (such as setting the _from_names for the From imports). Now there's a
  separate step for transforms, which are applied using
  transform.TransformVisitor.

  There are a couple of other related changes:

  * astroid.parse and AstroidBuilder gained a new parameter,
    `apply_transforms`, a boolean flag which controls whether the transforms
    are applied. We do this because there are cases where the vanilla tree
    is wanted, without any implicit modification.

  * the transforms are also applied for builtin modules, as a side effect of
    the fact that transform visiting was moved into
    AstroidBuilder._post_build from AstroidBuilder._data_build.

  Closes issue #116.

* Class._explicit_metaclass is now a public API, in the form of
  Class.declared_metaclass.

  Class.metaclass() remains the de facto method for retrieving the metaclass
  of a class, which will also do an evaluation of what declared_metaclass
  returns.

* Understand slices of tuples, lists, strings and instances with support for
  slices. Closes issue #137.

* Add proper grammatical names for the `infered` and `ass_type` methods,
  namely `inferred` and `assign_type`.

  The old methods will raise PendingDeprecationWarning, being slated for
  removal in astroid 2.0.

* Add new AST names in order to be similar to the ones from the builtin ast
  module.

  With this change, Getattr becomes Attribute, Backquote becomes Repr, Class
  is ClassDef, Function is FunctionDef, Discard is Expr, CallFunc is Call,
  From is ImportFrom, AssName is AssignName and AssAttr is AssignAttr. The
  old names are maintained for backwards compatibility and they are
  interchangeable, in the sense that using Discard will use Expr under the
  hood and the implemented visit_discard in checkers will be called with
  Expr nodes instead.
  The AST does not contain the old nodes; only the interoperability between
  them hides this fact. Recommendations to move to the new nodes are emitted
  accordingly, and the old names will be removed in astroid 2.0.

* Add support for understanding class creation using
  ``type.__new__(mcs, name, bases, attrs)``.

  Until now, inferring this kind of call resulted in Instances, not classes,
  since astroid didn't understand that the presence of the metaclass in the
  call leads to a class being created, not an instance.

* Understand the `slice` builtin. Closes issue #184.

* Add brain tips for numpy.core, which should fix Pylint's #453.

* Add a new node, DictUnpack, which is used to represent the unpacking of a
  dictionary into another dictionary, using the PEP 448 specific syntax
  ({1:2, **{2:3}}).

  This is a different approach than the one taken by the builtin ast module,
  which just uses None to represent this kind of operation; that seems
  conceptually wrong, since it puts non-AST objects into the AST.
  Closes issue #206.


What's New in astroid 1.3.6?
============================
Release Date: 2015-03-14

* Class.slots raises NotImplementedError for old style classes.
  Closes issue #67.

* Add a new option to AstroidManager, `optimize_ast`, which controls whether
  the peephole optimizer should be enabled or not.

  This prevents a regression, where the visit_binop method wasn't called
  anymore with astroid 1.3.5, due to the differences in the resulting AST.
  Closes issue #82.


What's New in astroid 1.3.5?
============================
Release Date: 2015-03-11

* Add the ability to optimize small ast subtrees, with the first use in the
  optimization of multiple BinOp nodes.

  This removes recursion in the rebuilder when dealing with a lot of small
  strings joined by the addition operator. Closes issue #59.

* Obtain the methods for the nose brain tip through a unittest.TestCase
  instance. Closes Pylint issue #457.

* Fix a crash which occurred when a class was the ancestor of itself.
  Closes issue #78.

* Improve the scope_lookup method for Classes regarding qualified objects
  with an attribute name exactly as one provided in the class itself.

  For example, for a class containing an attribute 'first', which was also
  an import and which had, as a base, a qualified name or a Getattr node in
  the form 'module.first', Pylint would have inferred the `first` name as
  the function from the Class, not the import. Closes Pylint issue #466.

* Implement the assigned_stmts operation for Starred nodes, which was
  omitted when support for Python 3 was added in astroid. Closes issue #36.


What's New in astroid 1.3.4?
============================
Release Date: 2015-01-17

* Get the first element from the method list when obtaining the functions
  from nose.tools.trivial. Closes Pylint issue #448.


What's New in astroid 1.3.3?
============================
Release Date: 2015-01-16

* Restore file_stream to a property, but deprecate it in favour of the newly
  added method Module.stream.

  By using a method instead of a property, it will be easier to properly
  close the file right after it is used, which will ensure that no file
  descriptors are leaked. Until now, due to the fact that a module was
  cached, it was not possible to close the file_stream anywhere.
  file_stream will start emitting PendingDeprecationWarnings in astroid 1.4,
  DeprecationWarnings in astroid 1.5 and it will finally be removed in
  astroid 1.6.

* Add inference tips for the 'tuple', 'list', 'dict' and 'set' builtins.
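To illustrate the inference tips for the builtin containers (an informal
example; the exact node class returned may differ across astroid versions),
a ``dict(...)`` call is expected to be inferred to a Dict node rather than
an opaque instance:

    import astroid

    call = astroid.extract_node('dict(a=1, b=2)  #@')
    inferred = next(call.infer())

    # With the builtin inference tips this is typically a nodes.Dict,
    # whose items expose the inferred keys and values.
    print(type(inferred).__name__)
    print(sorted(key.value for key, _ in inferred.items))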
* Add brain definitions for most string and unicode methods.

* Changed the API for Class.slots. It returns None when the class doesn't
  define any slots. Previously, Class.slots returned an empty list both when
  the class had no slots defined and when it had an empty list of slots.

* Add a new method to Class nodes, 'mro', for obtaining the method
  resolution order of the class.

* Add brain tips for six.moves. Closes issue #63.

* Improve the detection of functions decorated with decorators which return
  static or class methods.

* .slots() can contain unicode strings on Python 2.

* Add inference tips for nose.tools.


What's New in astroid 1.3.2?
============================
Release Date: 2014-11-22

* Fixed a crash with an invalid subscript index.

* Implement proper base class semantics for Python 3, where every class
  derives from object.

* Allow more fine-grained control over C extension loading in the manager.


What's New in astroid 1.3.1?
============================
Release Date: 2014-11-21

* Fixed a crash issue with the pytest brain module.


What's New in astroid 1.3.0?
============================
Release Date: 2014-11-20

* Fix a maximum recursion error that occurred during inference, where
  statements with the same name weren't filtered properly.
  Closes pylint issue #295.

* Check that EmptyNode has an underlying object in
  EmptyNode.has_underlying_object.

* Simplify the understanding of enum members.

* Fix an infinite loop with decorator call chain inference, where the
  decorator returns itself. Closes issue #50.

* Various speed improvements. Patch by Alex Munroe.

* Add pytest brain plugin. Patch by Robbie Coomber.

* Support for Python versions < 2.7 has been dropped, and the source has
  been made compatible with Python 2 and 3. Running 2to3 on installation for
  Python 3 is not needed anymore.

* astroid now depends on six.

* modutils._module_file opens __init__.py in binary mode.
  Closes issues #51 and #13.

* Only C extensions from trusted sources (the standard library) are loaded
  into the examining Python process to build an AST from the live module.

* Path names on case-insensitive filesystems are now properly handled. This
  fixes the stdlib detection code on Windows.

* Metaclass-generating functions like six.with_metaclass are now supported
  via some explicit detection code.

* astroid.register_module_extender has been added to generalize the support
  for module extenders as used by many brain plugins.

* brain plugins can now register hooks to handle failed imports, as done by
  the gobject-introspection plugin.

* The modules have been moved to a separate package directory;
  `setup.py develop` now works correctly.


What's New in astroid 1.2.1?
============================
Release Date: 2014-08-24

* Fix a crash that occurred when inferring a decorator call chain.
  Closes issue #42.

* Set the parent of vararg and kwarg nodes when inferring them.
  Closes issue #43.

* namedtuple inference knows about the '_fields' attribute.

* enum members know about the methods from the enum class.

* Name inference will look up in the parent function of the current scope,
  in case searching in the current scope fails.

* Inference of the functional form of enums takes into consideration the
  various inputs that enums accept.

* The inference engine handles binary operations (add, mul etc.) between
  instances.

* Fix an infinite loop in the inference, by returning a copy of instance
  attributes, when calling 'instance_attr'. Closes issue #34 (patch by
  Emile Anclin).
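As an informal illustration of the binary-operation handling between
instances mentioned above (the class and its names are made up for the
example; the printed result depends on the astroid version):

    import astroid

    node = astroid.extract_node('''
    class Money:
        def __init__(self, amount):
            self.amount = amount
        def __add__(self, other):
            return Money(self.amount + other.amount)

    Money(1) + Money(2)  #@
    ''')
    inferred = next(node.infer())
    # Expected to be an Instance of Money, since __add__ returns Money(...).
    print(inferred)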
* Don't crash when trying to infer unbound object.__new__ call. Closes issue #11. What's New in astroid 1.2.0? ============================ Release Date: 2014-07-25 * Function nodes can detect decorator call chain and see if they are decorated with builtin descriptors (`classmethod` and `staticmethod`). * infer_call_result called on a subtype of the builtin type will now return a new `Class` rather than an `Instance`. * `Class.metaclass()` now handles module-level __metaclass__ declaration on python 2, and no longer looks at the __metaclass__ class attribute on python 3. * Function nodes can detect if they are decorated with subclasses of builtin descriptors when determining their type (`classmethod` and `staticmethod`). * Add `slots` method to `Class` nodes, for retrieving the list of valid slots it defines. * Expose function annotation to astroid: `Arguments` node exposes 'varargannotation', 'kwargannotation' and 'annotations' attributes, while `Function` node has the 'returns' attribute. * Backported most of the logilab.common.modutils module there, as most things there are for pylint/astroid only and we want to be able to fix them without requiring a new logilab.common release * Fix names grabbed using wildcard import in "absolute import mode" (ie with absolute_import activated from the __future__ or with python 3). Fix pylint issue #58. * Add support in pylint-brain for understanding enum classes. What's New in astroid 1.1.1? ============================ Release Date: 2014-04-30 * `Class.metaclass()` looks in ancestors when the current class does not define explicitly a metaclass. * Do not cache modules if a module with the same qname is already known, and only return cached modules if both name and filepath match. Fixes pylint Bitbucket issue #136. What's New in astroid 1.1.0? ============================ Release Date: 2014-04-18 * All class nodes are marked as new style classes for Py3k. * Add a `metaclass` function to `Class` nodes to retrieve their metaclass. * Add a new YieldFrom node. * Add support for inferring arguments to namedtuple invocations. * Make sure that objects returned for namedtuple inference have parents. * Don't crash when inferring nodes from `with` clauses with multiple context managers. Closes #18. * Don't crash when a class has some __call__ method that is not inferable. Closes #17. * Unwrap instances found in `.ancestors()`, by using their _proxied class. What's New in astroid 1.0.1? ============================ Release Date: 2013-10-18 * fix py3k/windows installation issue (issue #4) * fix bug with namedtuple inference (issue #3) * get back gobject introspection from pylint-brain * fix some test failures under pypy and py3.3, though there is one remaining in each of these platform (2.7 tests are all green) What's New in astroid 1.0.0? ============================= Release Date: 2013-07-29 * Fix some omissions in py2stdlib's version of hashlib and add a small test for it. * Properly recognize methods annotated with abc.abstract{property,method} as abstract. * Allow transformation functions on any node, providing a `register_transform` function on the manager instead of the `register_transformer` to make it more flexible wrt node selection * Use the new transformation API to provide support for namedtuple (actually in pylint-brain, closes #8766) * Added the test_utils module for building ASTs and extracting deeply nested nodes for easier testing. 
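The register_transform API mentioned above can be used roughly like this
(a minimal sketch using the current node names; the transform itself is made
up for illustration):

    import astroid
    from astroid import MANAGER

    def add_placeholder_doc(node):
        # Illustrative transform: give undocumented functions a docstring.
        if node.doc is None:
            node.doc = "TODO: document me"
        return node

    # Register the transform for every FunctionDef node; an optional
    # predicate argument can restrict which nodes it applies to.
    MANAGER.register_transform(astroid.FunctionDef, add_placeholder_doc)

    module = astroid.parse('def f(): pass')
    print(module.body[0].doc)  # "TODO: document me"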
* Add support for py3k's keyword only arguments (PEP 3102) * RENAME THE PROJECT to astroid What's New in astroid 0.24.3? ============================= Release Date: 2013-04-16 * #124360 [py3.3]: Don't crash on 'yield from' nodes * #123062 [pylint-brain]: Use correct names for keywords for urlparse * #123056 [pylint-brain]: Add missing methods for hashlib * #123068: Fix inference for generator methods to correctly handle yields in lambdas. * #123068: Make sure .as_string() returns valid code for yields in expressions. * #47957: Set literals are now correctly treated as inference leaves. * #123074: Add support for inference of subscript operations on dict literals. What's New in astroid 0.24.2? ============================= Release Date: 2013-02-27 * pylint-brain: more subprocess.Popen faking (see #46273) * #109562 [jython]: java modules have no __doc__, causing crash * #120646 [py3]: fix for python3.3 _ast changes which may cause crash * #109988 [py3]: test fixes What's New in astroid 0.24.1? ============================= Release Date: 2012-10-05 * #106191: fix __future__ absolute import w/ From node * #50395: fix function fromlineno when some decorator is splited on multiple lines (patch by Mark Gius) * #92362: fix pyreverse crash on relative import * #104041: fix crash 'module object has no file_encoding attribute' * #4294 (pylint-brain): bad inference on mechanize.Browser.open * #46273 (pylint-brain): bad inference subprocess.Popen.communicate What's New in astroid 0.24.0? ============================= Release Date: 2012-07-18 * include pylint brain extension, describing some stuff not properly understood until then. (#100013, #53049, #23986, #72355) * #99583: fix raw_building.object_build for pypy implementation * use `open` rather than `file` in scoped_nodes as 2to3 miss it What's New in astroid 0.23.1? ============================= Release Date: 2011-12-08 * #62295: avoid "OSError: Too many open files" by moving .file_stream as a Module property opening the file only when needed * Lambda nodes should have a `name` attribute * only call transformers if modname specified What's New in astroid 0.23.0? ============================= Release Date: 2011-10-07 * #77187: ancestor() only returns the first class when inheriting from two classes coming from the same module * #76159: putting module's parent directory on the path causes problems linting when file names clash * #74746: should return empty module when __main__ is imported (patch by google) * #74748: getitem protocol return constant value instead of a Const node (patch by google) * #77188: support lgc.decorators.classproperty * #77253: provide a way for user code to register astng "transformers" using manager.register_transformer(callable) where callable will be called after an astng has been built and given the related module node as argument What's New in astroid 0.22.0? ============================= Release Date: 2011-07-18 * added column offset information on nodes (patch by fawce) * #70497: Crash on AttributeError: 'NoneType' object has no attribute '_infer_name' * #70381: IndentationError in import causes crash * #70565: absolute imports treated as relative (patch by Jacek Konieczny) * #70494: fix file encoding detection with python2.x * py3k: __builtin__ module renamed to builtins, we should consider this to properly build ast for builtin objects What's New in astroid 0.21.1? 
=============================
Release Date: 2011-01-11

* python3: handle file encoding; fix a lot of tests

* fix #52006: "True" and "False" can be assigned as variables in Python 2.x

* fix #8847: pylint doesn't understand function attributes at all

* fix #8774: iterator / generator / next method

* fix bad building of ast from living object w/ container classes (e.g.
  dict, set, list, tuple): contained elements should be turned to ast as
  well (not doing it will most probably cause a crash later)

* somewhat fix #57299 and other similar issues: Exception when trying to
  validate a file using PyQt's PyQt4.QtCore module: we can't do much about
  it but at least catch such exceptions to avoid a crash


What's New in astroid 0.21.0?
=============================
Release Date: 2010-11-15

* python3.x: first python3.x release

* fix #37105: Crash on AttributeError: 'NoneType' object has no attribute
  '_infer_name'

* python2.4: drop python < 2.5 support


What's New in astroid 0.20.4?
=============================
Release Date: 2010-10-27

* fix #37868 #37665 #33638 #37909: import problems with
  absolute_import_activated

* fix #8969: false positive when importing from zip-safe eggs

* fix #46131: minimal class decorator support

* minimal python2.7 support (dict and set comprehension)

* important progress on Py3k compatibility


What's New in astroid 0.20.3?
=============================
Release Date: 2010-09-28

* restored python 2.3 compatibility

* fix #45959: AttributeError: 'NoneType' object has no attribute 'frame',
  due to handling of __class__ when importing from a living object (because
  of missing source code or a C-compiled object)


What's New in astroid 0.20.2?
=============================
Release Date: 2010-09-10

* fix astng building bug: we have to set the module.package flag at node
  creation time, otherwise we'll miss this information when inferring
  relative imports during the build process (this should fix for instance
  some problems with numpy)

* added __subclasses__ to the special class attributes

* fix Class.interfaces so that no InferenceError is raised on empty
  __implements__

* yield YES on multiplication of tuple/list with a non-valid operand


What's New in astroid 0.20.1?
=============================
Release Date: 2010-05-11

* fix licensing to LGPL

* add ALL_NODES_CLASSES constant to the nodes module

* nodes redirection cleanup (possible since refactoring)

* bug fix for python < 2.5: add Delete node on Subscript nodes if we are in
  a del context


What's New in astroid 0.20.0?
=============================
Release Date: 2010-03-22

* fix #20464: raises "TypeError: '_Yes' object is not iterable" on list
  inference

* fix #19882: pylint hangs

* fix #20759: crash on pyreverse UNARY_OP_METHOD KeyError '~'

* fix #20760: crash on pyreverse: AttributeError: 'Subscript' object has no
  attribute 'infer_lhs'

* fix #21980: [Python-modules-team] Bug#573229: Pylint hangs; improving the
  cache yields a speed improvement on big projects

* major refactoring: rebuild the tree instead of modifying / monkey patching

* fix #19641: "maximum recursion depth exceeded" messages w/ python 2.6;
  this was introduced by a refactoring

* Ned Batchelder patch to properly import eggs with Windows line endings.
  This fixes a problem with pylint not being able to import setuptools.

* Winfried Plapper patches fixing the .op attribute value for AugAssign
  nodes, visit_ifexp in nodes_as_string

* Edward K. Ream / Tom Fleck patch closes #19641 ("maximum recursion depth
  exceeded" messages w/ python 2.6), see
  https://bugs.launchpad.net/pylint/+bug/456870


What's New in astroid 0.19.3?
============================= Release Date: 2009-12-18 * fix name error making 0.19.2 almost useless What's New in astroid 0.19.2? ============================= Release Date: 2009-12-18 * fix #18773: inference bug on class member (due to bad handling of instance / class nodes "bounded" to method calls) * fix #9515: strange message for non-class "Class baz has no egg member" (due to bad inference of function call) * fix #18953: inference fails with augmented assignment (special case for augmented assignement in infer_ass method) * fix #13944: false positive for class/instance attributes (Instance.getattr should return assign nodes on instance classes as well as instance. * include spelling fixes provided by Dotan Barak What's New in astroid 0.19.1? ============================= Release Date: 2009-08-27 * fix #8771: crash on yield expression * fix #10024: line numbering bug with try/except/finally * fix #10020: when building from living object, __name__ may be None * fix #9891: help(logilab.astng) throws TypeError * fix #9588: false positive E1101 for augmented assignment What's New in astroid 0.19.0? ============================= Release Date: 2009-03-25 * fixed python 2.6 issue (tests ok w/ 2.4, 2.5, 2.6. Anyone using 2.2 / 2.3 to tell us if it works?) * some understanding of the __builtin__.property decorator * inference: introduce UnboundMethod / rename InstanceMethod to BoundMethod What's New in astroid 0.18.0? ============================= Release Date: 2009-03-19 * major api / tree structure changes to make it works with compiler *and* python >= 2.5 _ast module * cleanup and refactoring on the way What's New in astroid 0.17.4? ============================= Release Date: 2008-11-19 * fix #6015: filter statements bug triggering W0631 false positive in pylint * fix #5571: Function.is_method() should return False on module level functions decorated by staticmethod/classmethod (avoid some crash in pylint) * fix #5010: understand python 2.5 explicit relative imports What's New in astroid 0.17.3? ============================= Release Date: 2008-09-10 * fix #5889: astng crash on certain pyreverse projects * fix bug w/ loop assignment in .lookup * apply Maarten patch fixing a crash on TryFinalaly.block_range and fixing 'else'/'final' block line detection What's New in astroid 0.17.2? ============================= Release Date: 2008-01-14 * "with" statement support, patch provided by Brian Hawthorne * fixed recursion arguments in nodes_of_class method as notified by Dave Borowitz * new InstanceMethod node introduced to wrap bound method (e.g. Function node), patch provided by Dave Borowitz What's New in astroid 0.17.1? ============================= Release Date: 2007-06-07 * fix #3651: crash when callable as default arg * fix #3670: subscription inference crash in some cases * fix #3673: Lambda instance has no attribute 'pytype' * fix crash with chained "import as" * fix crash on numpy * fix potential InfiniteRecursion error with builtin objects * include patch from Marien Zwart fixing some test / py 2.5 * be more error resilient when accessing living objects from external code in the manager What's New in astroid 0.17.0? 
============================= Release Date: 2007-02-22 * api change to be able to infer using a context (used to infer function call result only for now) * slightly better inference on astng built from living object by trying to infer dummy nodes (able to infer 'help' builtin for instance) * external attribute definition support * basic math operation inference * new pytype method on possibly inferred node (e.g. module, classes, const...) * fix a living object astng building bug, which was making "open" uninferable * fix lookup of name in method bug (#3289) * fix decorator lookup bug (#3261) What's New in astroid 0.16.3? ============================= Release Date: 2006-11-23 * enhance inference for the subscription notation (motivated by a patch from Amaury) and for unary sub/add What's New in astroid 0.16.2? ============================= Release Date: 2006-11-15 * grrr, fixed python 2.3 incompatibility introduced by generator expression scope handling * upgrade to avoid warnings with logilab-common 0.21.0 (on which now depends so) * backported astutils module from logilab-common What's New in astroid 0.16.1? ============================= Release Date: 2006-09-25 * python 2.5 support, patch provided by Marien Zwart * fix [Class|Module].block_range method (this fixes pylint's inline disabling of messages on classes/modules) * handle class.__bases__ and class.__mro__ (proper metaclass handling still needed though) * drop python2.2 support: remove code that was working around python2.2 * fixed generator expression scope bug * patch transformer to extract correct line information What's New in astroid 0.16.0? ============================= Release Date: 2006-04-19 * fix living object building to consider classes such as property as a class instead of a data descriptor * fix multiple assignment inference which was discarding some solutions * added some line manipulation methods to handle pylint's block messages control feature (Node.last_source_line(), None.block_range(lineno) What's New in astroid 0.15.1? ============================= Release Date: 2006-03-10 * fix avoiding to load everything from living objects... Thanks Amaury! * fix a possible NameError in Instance.infer_call_result What's New in astroid 0.15.0? ============================= Release Date: 2006-03-06 * fix possible infinite recursion on global statements (close #10342) and in various other cases... * fix locals/globals interactions when the global statement is used (close #10434) * multiple inference related bug fixes * associate List, Tuple and Dict and Const nodes to their respective classes * new .ass_type method on assignment related node, returning the assignment type node (Assign, For, ListCompFor, GenExprFor, TryExcept) * more API refactoring... .resolve method has disappeared, now you have .ilookup on every nodes and .getattr/.igetattr on node supporting the attribute protocol * introduced a YES object that may be returned when there is ambiguity on an inference path (typically function call when we don't know arguments value) * builder try to instantiate builtin exceptions subclasses to get their instance attribute What's New in astroid 0.14.0? ============================= Release Date: 2006-01-10 * some major inference improvements and refactoring ! 
The drawback is the introduction of some non backward compatible change in the API but it's imho much cleaner and powerful now :) * new boolean property .newstyle on Class nodes (implements #10073) * new .import_module method on Module node to help in .resolve refactoring * .instance_attrs has list of assignments to instance attribute dictionary as value instead of one * added missing GenExprIf and GenExprInner nodes, and implements as_string for each generator expression related nodes * specifically catch KeyboardInterrupt to reraise it in some places * fix so that module names are always absolute * fix .resolve on package where a subpackage is imported in the __init__ file * fix a bug regarding construction of Function node from living object with earlier version of python 2.4 * fix a NameError on Import and From self_resolve method * fix a bug occurring when building an astng from a living object with a property * lint fixes What's New in astroid 0.13.1? ============================= Release Date: 2005-11-07 * fix bug on building from living module the same object in encountered more than once time (e.g. builtins.object) (close #10069) * fix bug in Class.ancestors() regarding inner classes (close #10072) * fix .self_resolve() on From and Module nodes to handle package precedence over module (close #10066) * locals dict for package contains __path__ definition (close #10065) * astng provide GenExpr and GenExprFor nodes with python >= 2.4 (close #10063) * fix python2.2 compatibility (close #9922) * link .__contains__ to .has_key on scoped node to speed up execution * remove no more necessary .module_object() method on From and Module nodes * normalize parser.ParserError to SyntaxError with python 2.2 What's New in astroid 0.13.0? ============================= Release Date: 2005-10-21 * .locals and .globals on scoped node handle now a list of references to each assignment statements instead of a single reference to the first assignment statement. * fix bug with manager.astng_from_module_name when a context file is given (notably fix ZODB 3.4 crash with pylint/pyreverse) * fix Compare.as_string method * fix bug with lambda object missing the "type" attribute * some minor refactoring * This package has been extracted from the logilab-common package, which will be kept for some time for backward compatibility but will no longer be maintained (this explains that this package is starting with the 0.13 version number, since the fork occurs with the version released in logilab-common 0.12). astroid-2.0.1/COPYING0000644000076500000240000004310313224376075014775 0ustar claudiustaff00000000000000 GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. 
When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. 
You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. 
(This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. 
It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. astroid-2.0.1/COPYING.LESSER0000644000076500000240000006363713224376075016007 0ustar claudiustaff00000000000000 GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. 
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. 
When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. 
For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. 
In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. 
As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. 
You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. 
Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 
END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Libraries

If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).

To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.

Copyright (C) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

Also add information on how to contact you by electronic and paper mail.

You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. , 1 April 1990 Ty Coon, President of Vice

That's all there is to it!
astroid-2.0.1/MANIFEST.in0000644000076500000240000000027313316617574015505 0ustar claudiustaff00000000000000include ChangeLog
include README.rst
include COPYING
include COPYING.LESSER
include pytest.ini
recursive-include astroid/tests *.py *.zip *.egg *.pth
recursive-include astroid/brain *.py
astroid-2.0.1/PKG-INFO0000644000076500000240000000645013324065077015041 0ustar claudiustaff00000000000000Metadata-Version: 1.2
Name: astroid
Version: 2.0.1
Summary: An abstract syntax tree for Python with inference support.
Home-page: https://github.com/PyCQA/astroid
Author: Python Code Quality Authority
Author-email: code-quality@python.org
License: LGPL
Description: Astroid
        =======

        .. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master
            :target: https://travis-ci.org/PyCQA/astroid

        .. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true
            :alt: AppVeyor Build Status
            :target: https://ci.appveyor.com/project/PCManticore/astroid

        .. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master
            :target: https://coveralls.io/github/PyCQA/astroid?branch=master

        .. image:: https://readthedocs.org/projects/astroid/badge/?version=latest
            :target: http://astroid.readthedocs.io/en/latest/?badge=latest
            :alt: Documentation Status

        What's this?
        ------------

        The aim of this module is to provide a common base representation of
        Python source code. It is currently the library powering pylint's
        capabilities. It provides a compatible representation which comes from
        the `_ast` module. It rebuilds the tree generated by the builtin _ast
        module by recursively walking down the AST and building an extended
        ast.
        The new node classes have additional methods and attributes for
        different usages. They include some support for static inference and
        local name scopes. Furthermore, astroid can also build partial trees
        by inspecting living objects.

        Installation
        ------------

        Extract the tarball, jump into the created directory and run::

            pip install .

        If you want to do an editable installation, you can run::

            pip install -e .

        If you have any questions, please mail the code-quality@python.org
        mailing list for support. See
        http://mail.python.org/mailman/listinfo/code-quality for subscription
        information and archives.

        Python Versions
        ---------------

        astroid 2.0 is currently available for Python 3 only. If you want
        Python 2 support, older versions of astroid will still be supported
        until 2020.

        Test
        ----

        Tests are in the 'test' subdirectory. To launch the whole test suite,
        you can use either `tox` or `pytest`::

            tox
            pytest astroid

Platform: UNKNOWN
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Software Development :: Quality Assurance
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.4.*
astroid-2.0.1/pytest.ini0000644000076500000240000000007513316617575016001 0ustar claudiustaff00000000000000[pytest]
python_files=*test_*.py
addopts=-m "not acceptance"
astroid-2.0.1/README.rst0000644000076500000240000000365313316617574015443 0ustar claudiustaff00000000000000Astroid
=======

.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master
    :target: https://travis-ci.org/PyCQA/astroid

.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true
    :alt: AppVeyor Build Status
    :target: https://ci.appveyor.com/project/PCManticore/astroid

.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master
    :target: https://coveralls.io/github/PyCQA/astroid?branch=master

.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest
    :target: http://astroid.readthedocs.io/en/latest/?badge=latest
    :alt: Documentation Status

What's this?
------------

The aim of this module is to provide a common base representation of Python
source code. It is currently the library powering pylint's capabilities. It
provides a compatible representation which comes from the `_ast` module. It
rebuilds the tree generated by the builtin _ast module by recursively walking
down the AST and building an extended ast. The new node classes have
additional methods and attributes for different usages. They include some
support for static inference and local name scopes. Furthermore, astroid can
also build partial trees by inspecting living objects.

Installation
------------

Extract the tarball, jump into the created directory and run::

    pip install .

If you want to do an editable installation, you can run::

    pip install -e .

If you have any questions, please mail the code-quality@python.org mailing
list for support. See http://mail.python.org/mailman/listinfo/code-quality
for subscription information and archives.

Python Versions
---------------

astroid 2.0 is currently available for Python 3 only.
If you want Python 2 support, older versions of astroid will still be
supported until 2020.

Test
----

Tests are in the 'test' subdirectory. To launch the whole test suite, you can
use either `tox` or `pytest`::

    tox
    pytest astroid
astroid-2.0.1/setup.cfg0000644000076500000240000000015013324065077015554 0ustar claudiustaff00000000000000[aliases]
test = pytest

[tool:pytest]
testpaths = astroid/tests

[egg_info]
tag_build =
tag_date = 0
astroid-2.0.1/setup.py0000644000076500000240000000360313322572571015453 0ustar claudiustaff00000000000000#!/usr/bin/env python
# Copyright (c) 2006, 2009-2010, 2012-2013 LOGILAB S.A. (Paris, FRANCE)
# Copyright (c) 2010-2011 Julien Jehannet
# Copyright (c) 2014-2016, 2018 Claudiu Popa
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2017 Hugo
# Copyright (c) 2018 Ashley Whetter

# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER

# pylint: disable=W0404,W0622,W0704,W0613
"""Setup script for astroid."""
import os
from setuptools import setup, find_packages
from setuptools.command import easy_install
from setuptools.command import install_lib

real_path = os.path.realpath(__file__)
astroid_dir = os.path.dirname(real_path)
pkginfo = os.path.join(astroid_dir, 'astroid', '__pkginfo__.py')

with open(pkginfo, 'rb') as fobj:
    exec(compile(fobj.read(), pkginfo, 'exec'), locals())

with open(os.path.join(astroid_dir, 'README.rst')) as fobj:
    long_description = fobj.read()


def install():
    return setup(name=distname,
                 version=version,
                 license=license,
                 description=description,
                 long_description=long_description,
                 classifiers=classifiers,
                 author=author,
                 author_email=author_email,
                 url=web,
                 python_requires='>=3.4.*',
                 install_requires=install_requires,
                 extras_require=extras_require,
                 packages=find_packages(exclude=['astroid.tests']) + ['astroid.brain'],
                 setup_requires=['pytest-runner'],
                 test_suite='test',
                 tests_require=['pytest'],
                 )


if __name__ == '__main__':
    install()
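
The "What's this?" section of the README above describes astroid as an
extended AST with support for static inference. As a brief illustration (a
minimal sketch, not one of the packaged files; the `answer` and `result`
names are invented for the example), the snippet below builds a tree with the
public `astroid.parse` API and asks a node's `infer()` method for the value
it evaluates to::

    import astroid

    # Build an astroid module (the "extended ast") from source text.
    module = astroid.parse('''
    def answer():
        return 21 * 2
    result = answer()
    ''')

    # The last statement is `result = answer()`; infer the value it assigns.
    assign = module.body[-1]
    inferred = next(assign.value.infer())
    print(inferred.value)  # prints 42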