adding new stuff
This commit is contained in:
parent
f84d7183aa
commit
9ef8a96f9a
1580 changed files with 0 additions and 0 deletions
131
plugins/bundle/python-mode/pymode/libs/astroid/__init__.py
Normal file
131
plugins/bundle/python-mode/pymode/libs/astroid/__init__.py
Normal file
|
|
@ -0,0 +1,131 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Python Abstract Syntax Tree New Generation
|
||||
|
||||
The aim of this module is to provide a common base representation of
|
||||
python source code for projects such as pychecker, pyreverse,
|
||||
pylint... Well, actually the development of this library is essentially
|
||||
governed by pylint's needs.
|
||||
|
||||
It extends class defined in the python's _ast module with some
|
||||
additional methods and attributes. Instance attributes are added by a
|
||||
builder object, which can either generate extended ast (let's call
|
||||
them astroid ;) by visiting an existent ast tree or by inspecting living
|
||||
object. Methods are added by monkey patching ast classes.
|
||||
|
||||
Main modules are:
|
||||
|
||||
* nodes and scoped_nodes for more information about methods and
|
||||
attributes added to different node classes
|
||||
|
||||
* the manager contains a high level object to get astroid trees from
|
||||
source files and living objects. It maintains a cache of previously
|
||||
constructed tree for quick access
|
||||
|
||||
* builder contains the class responsible to build astroid trees
|
||||
"""
|
||||
__doctype__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
import re
|
||||
from operator import attrgetter
|
||||
|
||||
# WARNING: internal imports order matters !
|
||||
|
||||
# make all exception classes accessible from astroid package
|
||||
from astroid.exceptions import *
|
||||
|
||||
# make all node classes accessible from astroid package
|
||||
from astroid.nodes import *
|
||||
|
||||
# trigger extra monkey-patching
|
||||
from astroid import inference
|
||||
|
||||
# more stuff available
|
||||
from astroid import raw_building
|
||||
from astroid.bases import YES, Instance, BoundMethod, UnboundMethod
|
||||
from astroid.node_classes import are_exclusive, unpack_infer
|
||||
from astroid.scoped_nodes import builtin_lookup
|
||||
|
||||
# make a manager instance (borg) as well as Project and Package classes
|
||||
# accessible from astroid package
|
||||
from astroid.manager import AstroidManager, Project
|
||||
MANAGER = AstroidManager()
|
||||
del AstroidManager
|
||||
|
||||
# transform utilities (filters and decorator)
|
||||
|
||||
class AsStringRegexpPredicate(object):
|
||||
"""Class to be used as predicate that may be given to `register_transform`
|
||||
|
||||
First argument is a regular expression that will be searched against the `as_string`
|
||||
representation of the node onto which it's applied.
|
||||
|
||||
If specified, the second argument is an `attrgetter` expression that will be
|
||||
applied on the node first to get the actual node on which `as_string` should
|
||||
be called.
|
||||
|
||||
WARNING: This can be fairly slow, as it has to convert every AST node back
|
||||
to Python code; you should consider examining the AST directly instead.
|
||||
"""
|
||||
def __init__(self, regexp, expression=None):
|
||||
self.regexp = re.compile(regexp)
|
||||
self.expression = expression
|
||||
|
||||
def __call__(self, node):
|
||||
if self.expression is not None:
|
||||
node = attrgetter(self.expression)(node)
|
||||
return self.regexp.search(node.as_string())
|
||||
|
||||
def inference_tip(infer_function):
|
||||
"""Given an instance specific inference function, return a function to be
|
||||
given to MANAGER.register_transform to set this inference function.
|
||||
|
||||
Typical usage
|
||||
|
||||
.. sourcecode:: python
|
||||
|
||||
MANAGER.register_transform(CallFunc, inference_tip(infer_named_tuple),
|
||||
predicate)
|
||||
"""
|
||||
def transform(node, infer_function=infer_function):
|
||||
node._explicit_inference = infer_function
|
||||
return node
|
||||
return transform
|
||||
|
||||
|
||||
def register_module_extender(manager, module_name, get_extension_mod):
|
||||
def transform(node):
|
||||
extension_module = get_extension_mod()
|
||||
for name, obj in extension_module.locals.items():
|
||||
node.locals[name] = obj
|
||||
|
||||
manager.register_transform(Module, transform, lambda n: n.name == module_name)
|
||||
|
||||
|
||||
# load brain plugins
|
||||
from os import listdir
|
||||
from os.path import join, dirname
|
||||
BRAIN_MODULES_DIR = join(dirname(__file__), 'brain')
|
||||
if BRAIN_MODULES_DIR not in sys.path:
|
||||
# add it to the end of the list so user path take precedence
|
||||
sys.path.append(BRAIN_MODULES_DIR)
|
||||
# load modules in this directory
|
||||
for module in listdir(BRAIN_MODULES_DIR):
|
||||
if module.endswith('.py'):
|
||||
__import__(module[:-3])
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""astroid packaging information"""
|
||||
distname = 'astroid'
|
||||
|
||||
modname = 'astroid'
|
||||
|
||||
numversion = (1, 3, 8)
|
||||
version = '.'.join([str(num) for num in numversion])
|
||||
|
||||
install_requires = ['logilab-common>=0.63.0', 'six']
|
||||
|
||||
license = 'LGPL'
|
||||
|
||||
author = 'Logilab'
|
||||
author_email = 'pylint-dev@lists.logilab.org'
|
||||
mailinglist = "mailto://%s" % author_email
|
||||
web = 'http://bitbucket.org/logilab/astroid'
|
||||
|
||||
description = "A abstract syntax tree for Python with inference support."
|
||||
|
||||
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Topic :: Software Development :: Quality Assurance",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 3",
|
||||
]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
499
plugins/bundle/python-mode/pymode/libs/astroid/as_string.py
Normal file
499
plugins/bundle/python-mode/pymode/libs/astroid/as_string.py
Normal file
|
|
@ -0,0 +1,499 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""This module renders Astroid nodes as string:
|
||||
|
||||
* :func:`to_code` function return equivalent (hopefuly valid) python string
|
||||
|
||||
* :func:`dump` function return an internal representation of nodes found
|
||||
in the tree, useful for debugging or understanding the tree structure
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
INDENT = ' ' # 4 spaces ; keep indentation variable
|
||||
|
||||
|
||||
def dump(node, ids=False):
|
||||
"""print a nice astroid tree representation.
|
||||
|
||||
:param ids: if true, we also print the ids (usefull for debugging)
|
||||
"""
|
||||
result = []
|
||||
_repr_tree(node, result, ids=ids)
|
||||
return "\n".join(result)
|
||||
|
||||
def _repr_tree(node, result, indent='', _done=None, ids=False):
|
||||
"""built a tree representation of a node as a list of lines"""
|
||||
if _done is None:
|
||||
_done = set()
|
||||
if not hasattr(node, '_astroid_fields'): # not a astroid node
|
||||
return
|
||||
if node in _done:
|
||||
result.append(indent + 'loop in tree: %s' % node)
|
||||
return
|
||||
_done.add(node)
|
||||
node_str = str(node)
|
||||
if ids:
|
||||
node_str += ' . \t%x' % id(node)
|
||||
result.append(indent + node_str)
|
||||
indent += INDENT
|
||||
for field in node._astroid_fields:
|
||||
value = getattr(node, field)
|
||||
if isinstance(value, (list, tuple)):
|
||||
result.append(indent + field + " = [")
|
||||
for child in value:
|
||||
if isinstance(child, (list, tuple)):
|
||||
# special case for Dict # FIXME
|
||||
_repr_tree(child[0], result, indent, _done, ids)
|
||||
_repr_tree(child[1], result, indent, _done, ids)
|
||||
result.append(indent + ',')
|
||||
else:
|
||||
_repr_tree(child, result, indent, _done, ids)
|
||||
result.append(indent + "]")
|
||||
else:
|
||||
result.append(indent + field + " = ")
|
||||
_repr_tree(value, result, indent, _done, ids)
|
||||
|
||||
|
||||
class AsStringVisitor(object):
|
||||
"""Visitor to render an Astroid node as a valid python code string"""
|
||||
|
||||
def __call__(self, node):
|
||||
"""Makes this visitor behave as a simple function"""
|
||||
return node.accept(self)
|
||||
|
||||
def _stmt_list(self, stmts):
|
||||
"""return a list of nodes to string"""
|
||||
stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr])
|
||||
return INDENT + stmts.replace('\n', '\n'+INDENT)
|
||||
|
||||
|
||||
## visit_<node> methods ###########################################
|
||||
|
||||
def visit_arguments(self, node):
|
||||
"""return an astroid.Function node as string"""
|
||||
return node.format_args()
|
||||
|
||||
def visit_assattr(self, node):
|
||||
"""return an astroid.AssAttr node as string"""
|
||||
return self.visit_getattr(node)
|
||||
|
||||
def visit_assert(self, node):
|
||||
"""return an astroid.Assert node as string"""
|
||||
if node.fail:
|
||||
return 'assert %s, %s' % (node.test.accept(self),
|
||||
node.fail.accept(self))
|
||||
return 'assert %s' % node.test.accept(self)
|
||||
|
||||
def visit_assname(self, node):
|
||||
"""return an astroid.AssName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_assign(self, node):
|
||||
"""return an astroid.Assign node as string"""
|
||||
lhs = ' = '.join([n.accept(self) for n in node.targets])
|
||||
return '%s = %s' % (lhs, node.value.accept(self))
|
||||
|
||||
def visit_augassign(self, node):
|
||||
"""return an astroid.AugAssign node as string"""
|
||||
return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self))
|
||||
|
||||
def visit_backquote(self, node):
|
||||
"""return an astroid.Backquote node as string"""
|
||||
return '`%s`' % node.value.accept(self)
|
||||
|
||||
def visit_binop(self, node):
|
||||
"""return an astroid.BinOp node as string"""
|
||||
return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self))
|
||||
|
||||
def visit_boolop(self, node):
|
||||
"""return an astroid.BoolOp node as string"""
|
||||
return (' %s ' % node.op).join(['(%s)' % n.accept(self)
|
||||
for n in node.values])
|
||||
|
||||
def visit_break(self, node):
|
||||
"""return an astroid.Break node as string"""
|
||||
return 'break'
|
||||
|
||||
def visit_callfunc(self, node):
|
||||
"""return an astroid.CallFunc node as string"""
|
||||
expr_str = node.func.accept(self)
|
||||
args = [arg.accept(self) for arg in node.args]
|
||||
if node.starargs:
|
||||
args.append('*' + node.starargs.accept(self))
|
||||
if node.kwargs:
|
||||
args.append('**' + node.kwargs.accept(self))
|
||||
return '%s(%s)' % (expr_str, ', '.join(args))
|
||||
|
||||
def visit_class(self, node):
|
||||
"""return an astroid.Class node as string"""
|
||||
decorate = node.decorators and node.decorators.accept(self) or ''
|
||||
bases = ', '.join([n.accept(self) for n in node.bases])
|
||||
if sys.version_info[0] == 2:
|
||||
bases = bases and '(%s)' % bases or ''
|
||||
else:
|
||||
metaclass = node.metaclass()
|
||||
if metaclass and not node.has_metaclass_hack():
|
||||
if bases:
|
||||
bases = '(%s, metaclass=%s)' % (bases, metaclass.name)
|
||||
else:
|
||||
bases = '(metaclass=%s)' % metaclass.name
|
||||
else:
|
||||
bases = bases and '(%s)' % bases or ''
|
||||
docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
|
||||
return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs,
|
||||
self._stmt_list(node.body))
|
||||
|
||||
def visit_compare(self, node):
|
||||
"""return an astroid.Compare node as string"""
|
||||
rhs_str = ' '.join(['%s %s' % (op, expr.accept(self))
|
||||
for op, expr in node.ops])
|
||||
return '%s %s' % (node.left.accept(self), rhs_str)
|
||||
|
||||
def visit_comprehension(self, node):
|
||||
"""return an astroid.Comprehension node as string"""
|
||||
ifs = ''.join([' if %s' % n.accept(self) for n in node.ifs])
|
||||
return 'for %s in %s%s' % (node.target.accept(self),
|
||||
node.iter.accept(self), ifs)
|
||||
|
||||
def visit_const(self, node):
|
||||
"""return an astroid.Const node as string"""
|
||||
return repr(node.value)
|
||||
|
||||
def visit_continue(self, node):
|
||||
"""return an astroid.Continue node as string"""
|
||||
return 'continue'
|
||||
|
||||
def visit_delete(self, node): # XXX check if correct
|
||||
"""return an astroid.Delete node as string"""
|
||||
return 'del %s' % ', '.join([child.accept(self)
|
||||
for child in node.targets])
|
||||
|
||||
def visit_delattr(self, node):
|
||||
"""return an astroid.DelAttr node as string"""
|
||||
return self.visit_getattr(node)
|
||||
|
||||
def visit_delname(self, node):
|
||||
"""return an astroid.DelName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_decorators(self, node):
|
||||
"""return an astroid.Decorators node as string"""
|
||||
return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes])
|
||||
|
||||
def visit_dict(self, node):
|
||||
"""return an astroid.Dict node as string"""
|
||||
return '{%s}' % ', '.join(['%s: %s' % (key.accept(self),
|
||||
value.accept(self))
|
||||
for key, value in node.items])
|
||||
|
||||
def visit_dictcomp(self, node):
|
||||
"""return an astroid.DictComp node as string"""
|
||||
return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_discard(self, node):
|
||||
"""return an astroid.Discard node as string"""
|
||||
return node.value.accept(self)
|
||||
|
||||
def visit_emptynode(self, node):
|
||||
"""dummy method for visiting an Empty node"""
|
||||
return ''
|
||||
|
||||
def visit_excepthandler(self, node):
|
||||
if node.type:
|
||||
if node.name:
|
||||
excs = 'except %s, %s' % (node.type.accept(self),
|
||||
node.name.accept(self))
|
||||
else:
|
||||
excs = 'except %s' % node.type.accept(self)
|
||||
else:
|
||||
excs = 'except'
|
||||
return '%s:\n%s' % (excs, self._stmt_list(node.body))
|
||||
|
||||
def visit_ellipsis(self, node):
|
||||
"""return an astroid.Ellipsis node as string"""
|
||||
return '...'
|
||||
|
||||
def visit_empty(self, node):
|
||||
"""return an Empty node as string"""
|
||||
return ''
|
||||
|
||||
def visit_exec(self, node):
|
||||
"""return an astroid.Exec node as string"""
|
||||
if node.locals:
|
||||
return 'exec %s in %s, %s' % (node.expr.accept(self),
|
||||
node.locals.accept(self),
|
||||
node.globals.accept(self))
|
||||
if node.globals:
|
||||
return 'exec %s in %s' % (node.expr.accept(self),
|
||||
node.globals.accept(self))
|
||||
return 'exec %s' % node.expr.accept(self)
|
||||
|
||||
def visit_extslice(self, node):
|
||||
"""return an astroid.ExtSlice node as string"""
|
||||
return ','.join([dim.accept(self) for dim in node.dims])
|
||||
|
||||
def visit_for(self, node):
|
||||
"""return an astroid.For node as string"""
|
||||
fors = 'for %s in %s:\n%s' % (node.target.accept(self),
|
||||
node.iter.accept(self),
|
||||
self._stmt_list(node.body))
|
||||
if node.orelse:
|
||||
fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse))
|
||||
return fors
|
||||
|
||||
def visit_from(self, node):
|
||||
"""return an astroid.From node as string"""
|
||||
return 'from %s import %s' % ('.' * (node.level or 0) + node.modname,
|
||||
_import_string(node.names))
|
||||
|
||||
def visit_function(self, node):
|
||||
"""return an astroid.Function node as string"""
|
||||
decorate = node.decorators and node.decorators.accept(self) or ''
|
||||
docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
|
||||
return '\n%sdef %s(%s):%s\n%s' % (decorate, node.name, node.args.accept(self),
|
||||
docs, self._stmt_list(node.body))
|
||||
|
||||
def visit_genexpr(self, node):
|
||||
"""return an astroid.GenExpr node as string"""
|
||||
return '(%s %s)' % (node.elt.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_getattr(self, node):
|
||||
"""return an astroid.Getattr node as string"""
|
||||
return '%s.%s' % (node.expr.accept(self), node.attrname)
|
||||
|
||||
def visit_global(self, node):
|
||||
"""return an astroid.Global node as string"""
|
||||
return 'global %s' % ', '.join(node.names)
|
||||
|
||||
def visit_if(self, node):
|
||||
"""return an astroid.If node as string"""
|
||||
ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))]
|
||||
if node.orelse:# XXX use elif ???
|
||||
ifs.append('else:\n%s' % self._stmt_list(node.orelse))
|
||||
return '\n'.join(ifs)
|
||||
|
||||
def visit_ifexp(self, node):
|
||||
"""return an astroid.IfExp node as string"""
|
||||
return '%s if %s else %s' % (node.body.accept(self),
|
||||
node.test.accept(self),
|
||||
node.orelse.accept(self))
|
||||
|
||||
def visit_import(self, node):
|
||||
"""return an astroid.Import node as string"""
|
||||
return 'import %s' % _import_string(node.names)
|
||||
|
||||
def visit_keyword(self, node):
|
||||
"""return an astroid.Keyword node as string"""
|
||||
return '%s=%s' % (node.arg, node.value.accept(self))
|
||||
|
||||
def visit_lambda(self, node):
|
||||
"""return an astroid.Lambda node as string"""
|
||||
return 'lambda %s: %s' % (node.args.accept(self),
|
||||
node.body.accept(self))
|
||||
|
||||
def visit_list(self, node):
|
||||
"""return an astroid.List node as string"""
|
||||
return '[%s]' % ', '.join([child.accept(self) for child in node.elts])
|
||||
|
||||
def visit_listcomp(self, node):
|
||||
"""return an astroid.ListComp node as string"""
|
||||
return '[%s %s]' % (node.elt.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_module(self, node):
|
||||
"""return an astroid.Module node as string"""
|
||||
docs = node.doc and '"""%s"""\n\n' % node.doc or ''
|
||||
return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n'
|
||||
|
||||
def visit_name(self, node):
|
||||
"""return an astroid.Name node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_pass(self, node):
|
||||
"""return an astroid.Pass node as string"""
|
||||
return 'pass'
|
||||
|
||||
def visit_print(self, node):
|
||||
"""return an astroid.Print node as string"""
|
||||
nodes = ', '.join([n.accept(self) for n in node.values])
|
||||
if not node.nl:
|
||||
nodes = '%s,' % nodes
|
||||
if node.dest:
|
||||
return 'print >> %s, %s' % (node.dest.accept(self), nodes)
|
||||
return 'print %s' % nodes
|
||||
|
||||
def visit_raise(self, node):
|
||||
"""return an astroid.Raise node as string"""
|
||||
if node.exc:
|
||||
if node.inst:
|
||||
if node.tback:
|
||||
return 'raise %s, %s, %s' % (node.exc.accept(self),
|
||||
node.inst.accept(self),
|
||||
node.tback.accept(self))
|
||||
return 'raise %s, %s' % (node.exc.accept(self),
|
||||
node.inst.accept(self))
|
||||
return 'raise %s' % node.exc.accept(self)
|
||||
return 'raise'
|
||||
|
||||
def visit_return(self, node):
|
||||
"""return an astroid.Return node as string"""
|
||||
if node.value:
|
||||
return 'return %s' % node.value.accept(self)
|
||||
else:
|
||||
return 'return'
|
||||
|
||||
def visit_index(self, node):
|
||||
"""return a astroid.Index node as string"""
|
||||
return node.value.accept(self)
|
||||
|
||||
def visit_set(self, node):
|
||||
"""return an astroid.Set node as string"""
|
||||
return '{%s}' % ', '.join([child.accept(self) for child in node.elts])
|
||||
|
||||
def visit_setcomp(self, node):
|
||||
"""return an astroid.SetComp node as string"""
|
||||
return '{%s %s}' % (node.elt.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_slice(self, node):
|
||||
"""return a astroid.Slice node as string"""
|
||||
lower = node.lower and node.lower.accept(self) or ''
|
||||
upper = node.upper and node.upper.accept(self) or ''
|
||||
step = node.step and node.step.accept(self) or ''
|
||||
if step:
|
||||
return '%s:%s:%s' % (lower, upper, step)
|
||||
return '%s:%s' % (lower, upper)
|
||||
|
||||
def visit_subscript(self, node):
|
||||
"""return an astroid.Subscript node as string"""
|
||||
return '%s[%s]' % (node.value.accept(self), node.slice.accept(self))
|
||||
|
||||
def visit_tryexcept(self, node):
|
||||
"""return an astroid.TryExcept node as string"""
|
||||
trys = ['try:\n%s' % self._stmt_list(node.body)]
|
||||
for handler in node.handlers:
|
||||
trys.append(handler.accept(self))
|
||||
if node.orelse:
|
||||
trys.append('else:\n%s' % self._stmt_list(node.orelse))
|
||||
return '\n'.join(trys)
|
||||
|
||||
def visit_tryfinally(self, node):
|
||||
"""return an astroid.TryFinally node as string"""
|
||||
return 'try:\n%s\nfinally:\n%s' % (self._stmt_list(node.body),
|
||||
self._stmt_list(node.finalbody))
|
||||
|
||||
def visit_tuple(self, node):
|
||||
"""return an astroid.Tuple node as string"""
|
||||
if len(node.elts) == 1:
|
||||
return '(%s, )' % node.elts[0].accept(self)
|
||||
return '(%s)' % ', '.join([child.accept(self) for child in node.elts])
|
||||
|
||||
def visit_unaryop(self, node):
|
||||
"""return an astroid.UnaryOp node as string"""
|
||||
if node.op == 'not':
|
||||
operator = 'not '
|
||||
else:
|
||||
operator = node.op
|
||||
return '%s%s' % (operator, node.operand.accept(self))
|
||||
|
||||
def visit_while(self, node):
|
||||
"""return an astroid.While node as string"""
|
||||
whiles = 'while %s:\n%s' % (node.test.accept(self),
|
||||
self._stmt_list(node.body))
|
||||
if node.orelse:
|
||||
whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse))
|
||||
return whiles
|
||||
|
||||
def visit_with(self, node): # 'with' without 'as' is possible
|
||||
"""return an astroid.With node as string"""
|
||||
items = ', '.join(('(%s)' % expr.accept(self)) +
|
||||
(vars and ' as (%s)' % (vars.accept(self)) or '')
|
||||
for expr, vars in node.items)
|
||||
return 'with %s:\n%s' % (items, self._stmt_list(node.body))
|
||||
|
||||
def visit_yield(self, node):
|
||||
"""yield an ast.Yield node as string"""
|
||||
yi_val = node.value and (" " + node.value.accept(self)) or ""
|
||||
expr = 'yield' + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
else:
|
||||
return "(%s)" % (expr,)
|
||||
|
||||
|
||||
class AsStringVisitor3k(AsStringVisitor):
|
||||
"""AsStringVisitor3k overwrites some AsStringVisitor methods"""
|
||||
|
||||
def visit_excepthandler(self, node):
|
||||
if node.type:
|
||||
if node.name:
|
||||
excs = 'except %s as %s' % (node.type.accept(self),
|
||||
node.name.accept(self))
|
||||
else:
|
||||
excs = 'except %s' % node.type.accept(self)
|
||||
else:
|
||||
excs = 'except'
|
||||
return '%s:\n%s' % (excs, self._stmt_list(node.body))
|
||||
|
||||
def visit_nonlocal(self, node):
|
||||
"""return an astroid.Nonlocal node as string"""
|
||||
return 'nonlocal %s' % ', '.join(node.names)
|
||||
|
||||
def visit_raise(self, node):
|
||||
"""return an astroid.Raise node as string"""
|
||||
if node.exc:
|
||||
if node.cause:
|
||||
return 'raise %s from %s' % (node.exc.accept(self),
|
||||
node.cause.accept(self))
|
||||
return 'raise %s' % node.exc.accept(self)
|
||||
return 'raise'
|
||||
|
||||
def visit_starred(self, node):
|
||||
"""return Starred node as string"""
|
||||
return "*" + node.value.accept(self)
|
||||
|
||||
def visit_yieldfrom(self, node):
|
||||
""" Return an astroid.YieldFrom node as string. """
|
||||
yi_val = node.value and (" " + node.value.accept(self)) or ""
|
||||
expr = 'yield from' + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
else:
|
||||
return "(%s)" % (expr,)
|
||||
|
||||
|
||||
def _import_string(names):
|
||||
"""return a list of (name, asname) formatted as a string"""
|
||||
_names = []
|
||||
for name, asname in names:
|
||||
if asname is not None:
|
||||
_names.append('%s as %s' % (name, asname))
|
||||
else:
|
||||
_names.append(name)
|
||||
return ', '.join(_names)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
AsStringVisitor = AsStringVisitor3k
|
||||
|
||||
# this visitor is stateless, thus it can be reused
|
||||
to_code = AsStringVisitor()
|
||||
|
||||
|
|
@ -0,0 +1,86 @@
|
|||
# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Small AST optimizations."""
|
||||
|
||||
import _ast
|
||||
|
||||
from astroid import nodes
|
||||
|
||||
|
||||
__all__ = ('ASTPeepholeOptimizer', )
|
||||
|
||||
|
||||
try:
|
||||
_TYPES = (_ast.Str, _ast.Bytes)
|
||||
except AttributeError:
|
||||
_TYPES = (_ast.Str, )
|
||||
|
||||
|
||||
class ASTPeepholeOptimizer(object):
|
||||
"""Class for applying small optimizations to generate new AST."""
|
||||
|
||||
def optimize_binop(self, node):
|
||||
"""Optimize BinOps with string Const nodes on the lhs.
|
||||
|
||||
This fixes an infinite recursion crash, where multiple
|
||||
strings are joined using the addition operator. With a
|
||||
sufficient number of such strings, astroid will fail
|
||||
with a maximum recursion limit exceeded. The
|
||||
function will return a Const node with all the strings
|
||||
already joined.
|
||||
Return ``None`` if no AST node can be obtained
|
||||
through optimization.
|
||||
"""
|
||||
ast_nodes = []
|
||||
current = node
|
||||
while isinstance(current, _ast.BinOp):
|
||||
# lhs must be a BinOp with the addition operand.
|
||||
if not isinstance(current.left, _ast.BinOp):
|
||||
return
|
||||
if (not isinstance(current.left.op, _ast.Add)
|
||||
or not isinstance(current.op, _ast.Add)):
|
||||
return
|
||||
|
||||
# rhs must a str / bytes.
|
||||
if not isinstance(current.right, _TYPES):
|
||||
return
|
||||
|
||||
ast_nodes.append(current.right.s)
|
||||
current = current.left
|
||||
|
||||
if (isinstance(current, _ast.BinOp)
|
||||
and isinstance(current.left, _TYPES)
|
||||
and isinstance(current.right, _TYPES)):
|
||||
# Stop early if we are at the last BinOp in
|
||||
# the operation
|
||||
ast_nodes.append(current.right.s)
|
||||
ast_nodes.append(current.left.s)
|
||||
break
|
||||
|
||||
if not ast_nodes:
|
||||
return
|
||||
|
||||
# If we have inconsistent types, bail out.
|
||||
known = type(ast_nodes[0])
|
||||
if any(type(element) is not known
|
||||
for element in ast_nodes[1:]):
|
||||
return
|
||||
|
||||
value = known().join(reversed(ast_nodes))
|
||||
newnode = nodes.Const(value)
|
||||
return newnode
|
||||
652
plugins/bundle/python-mode/pymode/libs/astroid/bases.py
Normal file
652
plugins/bundle/python-mode/pymode/libs/astroid/bases.py
Normal file
|
|
@ -0,0 +1,652 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""This module contains base classes and functions for the nodes and some
|
||||
inference utils.
|
||||
"""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
|
||||
from logilab.common.decorators import cachedproperty
|
||||
|
||||
from astroid.exceptions import (InferenceError, AstroidError, NotFoundError,
|
||||
UnresolvableName, UseInferenceDefault)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
BUILTINS = 'builtins'
|
||||
else:
|
||||
BUILTINS = '__builtin__'
|
||||
|
||||
|
||||
class Proxy(object):
|
||||
"""a simple proxy object"""
|
||||
|
||||
_proxied = None # proxied object may be set by class or by instance
|
||||
|
||||
def __init__(self, proxied=None):
|
||||
if proxied is not None:
|
||||
self._proxied = proxied
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name == '_proxied':
|
||||
return getattr(self.__class__, '_proxied')
|
||||
if name in self.__dict__:
|
||||
return self.__dict__[name]
|
||||
return getattr(self._proxied, name)
|
||||
|
||||
def infer(self, context=None):
|
||||
yield self
|
||||
|
||||
|
||||
# Inference ##################################################################
|
||||
|
||||
class InferenceContext(object):
|
||||
__slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'infered')
|
||||
|
||||
def __init__(self, path=None, infered=None):
|
||||
self.path = path or set()
|
||||
self.lookupname = None
|
||||
self.callcontext = None
|
||||
self.boundnode = None
|
||||
self.infered = infered or {}
|
||||
|
||||
def push(self, node):
|
||||
name = self.lookupname
|
||||
if (node, name) in self.path:
|
||||
raise StopIteration()
|
||||
self.path.add((node, name))
|
||||
|
||||
def clone(self):
|
||||
# XXX copy lookupname/callcontext ?
|
||||
clone = InferenceContext(self.path, infered=self.infered)
|
||||
clone.callcontext = self.callcontext
|
||||
clone.boundnode = self.boundnode
|
||||
return clone
|
||||
|
||||
def cache_generator(self, key, generator):
|
||||
results = []
|
||||
for result in generator:
|
||||
results.append(result)
|
||||
yield result
|
||||
|
||||
self.infered[key] = tuple(results)
|
||||
return
|
||||
|
||||
@contextmanager
|
||||
def restore_path(self):
|
||||
path = set(self.path)
|
||||
yield
|
||||
self.path = path
|
||||
|
||||
def copy_context(context):
|
||||
if context is not None:
|
||||
return context.clone()
|
||||
else:
|
||||
return InferenceContext()
|
||||
|
||||
|
||||
def _infer_stmts(stmts, context, frame=None):
|
||||
"""return an iterator on statements inferred by each statement in <stmts>
|
||||
"""
|
||||
stmt = None
|
||||
infered = False
|
||||
if context is not None:
|
||||
name = context.lookupname
|
||||
context = context.clone()
|
||||
else:
|
||||
name = None
|
||||
context = InferenceContext()
|
||||
for stmt in stmts:
|
||||
if stmt is YES:
|
||||
yield stmt
|
||||
infered = True
|
||||
continue
|
||||
context.lookupname = stmt._infer_name(frame, name)
|
||||
try:
|
||||
for infered in stmt.infer(context):
|
||||
yield infered
|
||||
infered = True
|
||||
except UnresolvableName:
|
||||
continue
|
||||
except InferenceError:
|
||||
yield YES
|
||||
infered = True
|
||||
if not infered:
|
||||
raise InferenceError(str(stmt))
|
||||
|
||||
|
||||
# special inference objects (e.g. may be returned as nodes by .infer()) #######
|
||||
|
||||
class _Yes(object):
|
||||
"""a yes object"""
|
||||
def __repr__(self):
|
||||
return 'YES'
|
||||
def __getattribute__(self, name):
|
||||
if name == 'next':
|
||||
raise AttributeError('next method should not be called')
|
||||
if name.startswith('__') and name.endswith('__'):
|
||||
# to avoid inspection pb
|
||||
return super(_Yes, self).__getattribute__(name)
|
||||
return self
|
||||
def __call__(self, *args, **kwargs):
|
||||
return self
|
||||
|
||||
|
||||
YES = _Yes()
|
||||
|
||||
|
||||
class Instance(Proxy):
|
||||
"""a special node representing a class instance"""
|
||||
def getattr(self, name, context=None, lookupclass=True):
|
||||
try:
|
||||
values = self._proxied.instance_attr(name, context)
|
||||
except NotFoundError:
|
||||
if name == '__class__':
|
||||
return [self._proxied]
|
||||
if lookupclass:
|
||||
# class attributes not available through the instance
|
||||
# unless they are explicitly defined
|
||||
if name in ('__name__', '__bases__', '__mro__', '__subclasses__'):
|
||||
return self._proxied.local_attr(name)
|
||||
return self._proxied.getattr(name, context)
|
||||
raise NotFoundError(name)
|
||||
# since we've no context information, return matching class members as
|
||||
# well
|
||||
if lookupclass:
|
||||
try:
|
||||
return values + self._proxied.getattr(name, context)
|
||||
except NotFoundError:
|
||||
pass
|
||||
return values
|
||||
|
||||
def igetattr(self, name, context=None):
|
||||
"""inferred getattr"""
|
||||
if not context:
|
||||
context = InferenceContext()
|
||||
try:
|
||||
# avoid recursively inferring the same attr on the same class
|
||||
|
||||
context.push((self._proxied, name))
|
||||
# XXX frame should be self._proxied, or not ?
|
||||
get_attr = self.getattr(name, context, lookupclass=False)
|
||||
return _infer_stmts(
|
||||
self._wrap_attr(get_attr, context),
|
||||
context,
|
||||
frame=self,
|
||||
)
|
||||
except NotFoundError:
|
||||
try:
|
||||
# fallback to class'igetattr since it has some logic to handle
|
||||
# descriptors
|
||||
return self._wrap_attr(self._proxied.igetattr(name, context),
|
||||
context)
|
||||
except NotFoundError:
|
||||
raise InferenceError(name)
|
||||
|
||||
def _wrap_attr(self, attrs, context=None):
|
||||
"""wrap bound methods of attrs in a InstanceMethod proxies"""
|
||||
for attr in attrs:
|
||||
if isinstance(attr, UnboundMethod):
|
||||
if BUILTINS + '.property' in attr.decoratornames():
|
||||
for infered in attr.infer_call_result(self, context):
|
||||
yield infered
|
||||
else:
|
||||
yield BoundMethod(attr, self)
|
||||
else:
|
||||
yield attr
|
||||
|
||||
def infer_call_result(self, caller, context=None):
|
||||
"""infer what a class instance is returning when called"""
|
||||
infered = False
|
||||
for node in self._proxied.igetattr('__call__', context):
|
||||
if node is YES:
|
||||
continue
|
||||
for res in node.infer_call_result(caller, context):
|
||||
infered = True
|
||||
yield res
|
||||
if not infered:
|
||||
raise InferenceError()
|
||||
|
||||
def __repr__(self):
|
||||
return '<Instance of %s.%s at 0x%s>' % (self._proxied.root().name,
|
||||
self._proxied.name,
|
||||
id(self))
|
||||
def __str__(self):
|
||||
return 'Instance of %s.%s' % (self._proxied.root().name,
|
||||
self._proxied.name)
|
||||
|
||||
def callable(self):
|
||||
try:
|
||||
self._proxied.getattr('__call__')
|
||||
return True
|
||||
except NotFoundError:
|
||||
return False
|
||||
|
||||
def pytype(self):
|
||||
return self._proxied.qname()
|
||||
|
||||
def display_type(self):
|
||||
return 'Instance of'
|
||||
|
||||
|
||||
class UnboundMethod(Proxy):
|
||||
"""a special node representing a method not bound to an instance"""
|
||||
def __repr__(self):
|
||||
frame = self._proxied.parent.frame()
|
||||
return '<%s %s of %s at 0x%s' % (self.__class__.__name__,
|
||||
self._proxied.name,
|
||||
frame.qname(), id(self))
|
||||
|
||||
def is_bound(self):
|
||||
return False
|
||||
|
||||
def getattr(self, name, context=None):
|
||||
if name == 'im_func':
|
||||
return [self._proxied]
|
||||
return super(UnboundMethod, self).getattr(name, context)
|
||||
|
||||
def igetattr(self, name, context=None):
|
||||
if name == 'im_func':
|
||||
return iter((self._proxied,))
|
||||
return super(UnboundMethod, self).igetattr(name, context)
|
||||
|
||||
def infer_call_result(self, caller, context):
|
||||
# If we're unbound method __new__ of builtin object, the result is an
|
||||
# instance of the class given as first argument.
|
||||
if (self._proxied.name == '__new__' and
|
||||
self._proxied.parent.frame().qname() == '%s.object' % BUILTINS):
|
||||
infer = caller.args[0].infer() if caller.args else []
|
||||
return ((x is YES and x or Instance(x)) for x in infer)
|
||||
return self._proxied.infer_call_result(caller, context)
|
||||
|
||||
|
||||
class BoundMethod(UnboundMethod):
|
||||
"""a special node representing a method bound to an instance"""
|
||||
def __init__(self, proxy, bound):
|
||||
UnboundMethod.__init__(self, proxy)
|
||||
self.bound = bound
|
||||
|
||||
def is_bound(self):
|
||||
return True
|
||||
|
||||
def infer_call_result(self, caller, context):
|
||||
context = context.clone()
|
||||
context.boundnode = self.bound
|
||||
return self._proxied.infer_call_result(caller, context)
|
||||
|
||||
|
||||
class Generator(Instance):
|
||||
"""a special node representing a generator.
|
||||
|
||||
Proxied class is set once for all in raw_building.
|
||||
"""
|
||||
def callable(self):
|
||||
return False
|
||||
|
||||
def pytype(self):
|
||||
return '%s.generator' % BUILTINS
|
||||
|
||||
def display_type(self):
|
||||
return 'Generator'
|
||||
|
||||
def __repr__(self):
|
||||
return '<Generator(%s) l.%s at 0x%s>' % (self._proxied.name, self.lineno, id(self))
|
||||
|
||||
def __str__(self):
|
||||
return 'Generator(%s)' % (self._proxied.name)
|
||||
|
||||
|
||||
# decorators ##################################################################
|
||||
|
||||
def path_wrapper(func):
|
||||
"""return the given infer function wrapped to handle the path"""
|
||||
def wrapped(node, context=None, _func=func, **kwargs):
|
||||
"""wrapper function handling context"""
|
||||
if context is None:
|
||||
context = InferenceContext()
|
||||
context.push(node)
|
||||
yielded = set()
|
||||
for res in _func(node, context, **kwargs):
|
||||
# unproxy only true instance, not const, tuple, dict...
|
||||
if res.__class__ is Instance:
|
||||
ares = res._proxied
|
||||
else:
|
||||
ares = res
|
||||
if not ares in yielded:
|
||||
yield res
|
||||
yielded.add(ares)
|
||||
return wrapped
|
||||
|
||||
def yes_if_nothing_infered(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
infered = False
|
||||
for node in func(*args, **kwargs):
|
||||
infered = True
|
||||
yield node
|
||||
if not infered:
|
||||
yield YES
|
||||
return wrapper
|
||||
|
||||
def raise_if_nothing_infered(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
infered = False
|
||||
for node in func(*args, **kwargs):
|
||||
infered = True
|
||||
yield node
|
||||
if not infered:
|
||||
raise InferenceError()
|
||||
return wrapper
|
||||
|
||||
|
||||
# Node ######################################################################
|
||||
|
||||
class NodeNG(object):
|
||||
"""Base Class for all Astroid node classes.
|
||||
|
||||
It represents a node of the new abstract syntax tree.
|
||||
"""
|
||||
is_statement = False
|
||||
optional_assign = False # True for For (and for Comprehension if py <3.0)
|
||||
is_function = False # True for Function nodes
|
||||
# attributes below are set by the builder module or by raw factories
|
||||
lineno = None
|
||||
fromlineno = None
|
||||
tolineno = None
|
||||
col_offset = None
|
||||
# parent node in the tree
|
||||
parent = None
|
||||
# attributes containing child node(s) redefined in most concrete classes:
|
||||
_astroid_fields = ()
|
||||
# instance specific inference function infer(node, context)
|
||||
_explicit_inference = None
|
||||
|
||||
def infer(self, context=None, **kwargs):
|
||||
"""main interface to the interface system, return a generator on infered
|
||||
values.
|
||||
|
||||
If the instance has some explicit inference function set, it will be
|
||||
called instead of the default interface.
|
||||
"""
|
||||
if self._explicit_inference is not None:
|
||||
# explicit_inference is not bound, give it self explicitly
|
||||
try:
|
||||
return self._explicit_inference(self, context, **kwargs)
|
||||
except UseInferenceDefault:
|
||||
pass
|
||||
|
||||
if not context:
|
||||
return self._infer(context, **kwargs)
|
||||
|
||||
key = (self, context.lookupname,
|
||||
context.callcontext, context.boundnode)
|
||||
if key in context.infered:
|
||||
return iter(context.infered[key])
|
||||
|
||||
return context.cache_generator(key, self._infer(context, **kwargs))
|
||||
|
||||
def _repr_name(self):
|
||||
"""return self.name or self.attrname or '' for nice representation"""
|
||||
return getattr(self, 'name', getattr(self, 'attrname', ''))
|
||||
|
||||
def __str__(self):
|
||||
return '%s(%s)' % (self.__class__.__name__, self._repr_name())
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s(%s) l.%s [%s] at 0x%x>' % (self.__class__.__name__,
|
||||
self._repr_name(),
|
||||
self.fromlineno,
|
||||
self.root().name,
|
||||
id(self))
|
||||
|
||||
|
||||
def accept(self, visitor):
|
||||
func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
|
||||
return func(self)
|
||||
|
||||
def get_children(self):
|
||||
for field in self._astroid_fields:
|
||||
attr = getattr(self, field)
|
||||
if attr is None:
|
||||
continue
|
||||
if isinstance(attr, (list, tuple)):
|
||||
for elt in attr:
|
||||
yield elt
|
||||
else:
|
||||
yield attr
|
||||
|
||||
def last_child(self):
|
||||
"""an optimized version of list(get_children())[-1]"""
|
||||
for field in self._astroid_fields[::-1]:
|
||||
attr = getattr(self, field)
|
||||
if not attr: # None or empty listy / tuple
|
||||
continue
|
||||
if attr.__class__ in (list, tuple):
|
||||
return attr[-1]
|
||||
else:
|
||||
return attr
|
||||
return None
|
||||
|
||||
def parent_of(self, node):
|
||||
"""return true if i'm a parent of the given node"""
|
||||
parent = node.parent
|
||||
while parent is not None:
|
||||
if self is parent:
|
||||
return True
|
||||
parent = parent.parent
|
||||
return False
|
||||
|
||||
def statement(self):
|
||||
"""return the first parent node marked as statement node"""
|
||||
if self.is_statement:
|
||||
return self
|
||||
return self.parent.statement()
|
||||
|
||||
def frame(self):
|
||||
"""return the first parent frame node (i.e. Module, Function or Class)
|
||||
"""
|
||||
return self.parent.frame()
|
||||
|
||||
def scope(self):
|
||||
"""return the first node defining a new scope (i.e. Module, Function,
|
||||
Class, Lambda but also GenExpr)
|
||||
"""
|
||||
return self.parent.scope()
|
||||
|
||||
def root(self):
|
||||
"""return the root node of the tree, (i.e. a Module)"""
|
||||
if self.parent:
|
||||
return self.parent.root()
|
||||
return self
|
||||
|
||||
def child_sequence(self, child):
|
||||
"""search for the right sequence where the child lies in"""
|
||||
for field in self._astroid_fields:
|
||||
node_or_sequence = getattr(self, field)
|
||||
if node_or_sequence is child:
|
||||
return [node_or_sequence]
|
||||
# /!\ compiler.ast Nodes have an __iter__ walking over child nodes
|
||||
if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
|
||||
return node_or_sequence
|
||||
else:
|
||||
msg = 'Could not find %s in %s\'s children'
|
||||
raise AstroidError(msg % (repr(child), repr(self)))
|
||||
|
||||
def locate_child(self, child):
|
||||
"""return a 2-uple (child attribute name, sequence or node)"""
|
||||
for field in self._astroid_fields:
|
||||
node_or_sequence = getattr(self, field)
|
||||
# /!\ compiler.ast Nodes have an __iter__ walking over child nodes
|
||||
if child is node_or_sequence:
|
||||
return field, child
|
||||
if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
|
||||
return field, node_or_sequence
|
||||
msg = 'Could not find %s in %s\'s children'
|
||||
raise AstroidError(msg % (repr(child), repr(self)))
|
||||
# FIXME : should we merge child_sequence and locate_child ? locate_child
|
||||
# is only used in are_exclusive, child_sequence one time in pylint.
|
||||
|
||||
def next_sibling(self):
|
||||
"""return the next sibling statement"""
|
||||
return self.parent.next_sibling()
|
||||
|
||||
def previous_sibling(self):
|
||||
"""return the previous sibling statement"""
|
||||
return self.parent.previous_sibling()
|
||||
|
||||
def nearest(self, nodes):
|
||||
"""return the node which is the nearest before this one in the
|
||||
given list of nodes
|
||||
"""
|
||||
myroot = self.root()
|
||||
mylineno = self.fromlineno
|
||||
nearest = None, 0
|
||||
for node in nodes:
|
||||
assert node.root() is myroot, \
|
||||
'nodes %s and %s are not from the same module' % (self, node)
|
||||
lineno = node.fromlineno
|
||||
if node.fromlineno > mylineno:
|
||||
break
|
||||
if lineno > nearest[1]:
|
||||
nearest = node, lineno
|
||||
# FIXME: raise an exception if nearest is None ?
|
||||
return nearest[0]
|
||||
|
||||
# these are lazy because they're relatively expensive to compute for every
|
||||
# single node, and they rarely get looked at
|
||||
|
||||
@cachedproperty
|
||||
def fromlineno(self):
|
||||
if self.lineno is None:
|
||||
return self._fixed_source_line()
|
||||
else:
|
||||
return self.lineno
|
||||
|
||||
@cachedproperty
|
||||
def tolineno(self):
|
||||
if not self._astroid_fields:
|
||||
# can't have children
|
||||
lastchild = None
|
||||
else:
|
||||
lastchild = self.last_child()
|
||||
if lastchild is None:
|
||||
return self.fromlineno
|
||||
else:
|
||||
return lastchild.tolineno
|
||||
|
||||
# TODO / FIXME:
|
||||
assert self.fromlineno is not None, self
|
||||
assert self.tolineno is not None, self
|
||||
|
||||
def _fixed_source_line(self):
|
||||
"""return the line number where the given node appears
|
||||
|
||||
we need this method since not all nodes have the lineno attribute
|
||||
correctly set...
|
||||
"""
|
||||
line = self.lineno
|
||||
_node = self
|
||||
try:
|
||||
while line is None:
|
||||
_node = next(_node.get_children())
|
||||
line = _node.lineno
|
||||
except StopIteration:
|
||||
_node = self.parent
|
||||
while _node and line is None:
|
||||
line = _node.lineno
|
||||
_node = _node.parent
|
||||
return line
|
||||
|
||||
def block_range(self, lineno):
|
||||
"""handle block line numbers range for non block opening statements
|
||||
"""
|
||||
return lineno, self.tolineno
|
||||
|
||||
def set_local(self, name, stmt):
|
||||
"""delegate to a scoped parent handling a locals dictionary"""
|
||||
self.parent.set_local(name, stmt)
|
||||
|
||||
def nodes_of_class(self, klass, skip_klass=None):
|
||||
"""return an iterator on nodes which are instance of the given class(es)
|
||||
|
||||
klass may be a class object or a tuple of class objects
|
||||
"""
|
||||
if isinstance(self, klass):
|
||||
yield self
|
||||
for child_node in self.get_children():
|
||||
if skip_klass is not None and isinstance(child_node, skip_klass):
|
||||
continue
|
||||
for matching in child_node.nodes_of_class(klass, skip_klass):
|
||||
yield matching
|
||||
|
||||
def _infer_name(self, frame, name):
|
||||
# overridden for From, Import, Global, TryExcept and Arguments
|
||||
return None
|
||||
|
||||
def _infer(self, context=None):
|
||||
"""we don't know how to resolve a statement by default"""
|
||||
# this method is overridden by most concrete classes
|
||||
raise InferenceError(self.__class__.__name__)
|
||||
|
||||
def infered(self):
|
||||
'''return list of infered values for a more simple inference usage'''
|
||||
return list(self.infer())
|
||||
|
||||
def instanciate_class(self):
|
||||
"""instanciate a node if it is a Class node, else return self"""
|
||||
return self
|
||||
|
||||
def has_base(self, node):
|
||||
return False
|
||||
|
||||
def callable(self):
|
||||
return False
|
||||
|
||||
def eq(self, value):
|
||||
return False
|
||||
|
||||
def as_string(self):
|
||||
from astroid.as_string import to_code
|
||||
return to_code(self)
|
||||
|
||||
def repr_tree(self, ids=False):
|
||||
from astroid.as_string import dump
|
||||
return dump(self)
|
||||
|
||||
|
||||
class Statement(NodeNG):
|
||||
"""Statement node adding a few attributes"""
|
||||
is_statement = True
|
||||
|
||||
def next_sibling(self):
|
||||
"""return the next sibling statement"""
|
||||
stmts = self.parent.child_sequence(self)
|
||||
index = stmts.index(self)
|
||||
try:
|
||||
return stmts[index +1]
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
def previous_sibling(self):
|
||||
"""return the previous sibling statement"""
|
||||
stmts = self.parent.child_sequence(self)
|
||||
index = stmts.index(self)
|
||||
if index >= 1:
|
||||
return stmts[index -1]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,245 @@
|
|||
"""Astroid hooks for various builtins."""
|
||||
|
||||
import sys
|
||||
from functools import partial
|
||||
from textwrap import dedent
|
||||
|
||||
import six
|
||||
from astroid import (MANAGER, UseInferenceDefault,
|
||||
inference_tip, YES, InferenceError, UnresolvableName)
|
||||
from astroid import nodes
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def _extend_str(class_node, rvalue):
|
||||
"""function to extend builtin str/unicode class"""
|
||||
# TODO(cpopa): this approach will make astroid to believe
|
||||
# that some arguments can be passed by keyword, but
|
||||
# unfortunately, strings and bytes don't accept keyword arguments.
|
||||
code = dedent('''
|
||||
class whatever(object):
|
||||
def join(self, iterable):
|
||||
return {rvalue}
|
||||
def replace(self, old, new, count=None):
|
||||
return {rvalue}
|
||||
def format(self, *args, **kwargs):
|
||||
return {rvalue}
|
||||
def encode(self, encoding='ascii', errors=None):
|
||||
return ''
|
||||
def decode(self, encoding='ascii', errors=None):
|
||||
return u''
|
||||
def capitalize(self):
|
||||
return {rvalue}
|
||||
def title(self):
|
||||
return {rvalue}
|
||||
def lower(self):
|
||||
return {rvalue}
|
||||
def upper(self):
|
||||
return {rvalue}
|
||||
def swapcase(self):
|
||||
return {rvalue}
|
||||
def index(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def find(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def count(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def strip(self, chars=None):
|
||||
return {rvalue}
|
||||
def lstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rjust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def center(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def ljust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
''')
|
||||
code = code.format(rvalue=rvalue)
|
||||
fake = AstroidBuilder(MANAGER).string_build(code)['whatever']
|
||||
for method in fake.mymethods():
|
||||
class_node.locals[method.name] = [method]
|
||||
method.parent = class_node
|
||||
|
||||
def extend_builtins(class_transforms):
|
||||
from astroid.bases import BUILTINS
|
||||
builtin_ast = MANAGER.astroid_cache[BUILTINS]
|
||||
for class_name, transform in class_transforms.items():
|
||||
transform(builtin_ast[class_name])
|
||||
|
||||
if sys.version_info > (3, 0):
|
||||
extend_builtins({'bytes': partial(_extend_str, rvalue="b''"),
|
||||
'str': partial(_extend_str, rvalue="''")})
|
||||
else:
|
||||
extend_builtins({'str': partial(_extend_str, rvalue="''"),
|
||||
'unicode': partial(_extend_str, rvalue="u''")})
|
||||
|
||||
|
||||
def register_builtin_transform(transform, builtin_name):
|
||||
"""Register a new transform function for the given *builtin_name*.
|
||||
|
||||
The transform function must accept two parameters, a node and
|
||||
an optional context.
|
||||
"""
|
||||
def _transform_wrapper(node, context=None):
|
||||
result = transform(node, context=context)
|
||||
if result:
|
||||
result.parent = node
|
||||
result.lineno = node.lineno
|
||||
result.col_offset = node.col_offset
|
||||
return iter([result])
|
||||
|
||||
MANAGER.register_transform(nodes.CallFunc,
|
||||
inference_tip(_transform_wrapper),
|
||||
lambda n: (isinstance(n.func, nodes.Name) and
|
||||
n.func.name == builtin_name))
|
||||
|
||||
|
||||
def _generic_inference(node, context, node_type, transform):
|
||||
args = node.args
|
||||
if not args:
|
||||
return node_type()
|
||||
if len(node.args) > 1:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
arg, = args
|
||||
transformed = transform(arg)
|
||||
if not transformed:
|
||||
try:
|
||||
infered = next(arg.infer(context=context))
|
||||
except (InferenceError, StopIteration):
|
||||
raise UseInferenceDefault()
|
||||
if infered is YES:
|
||||
raise UseInferenceDefault()
|
||||
transformed = transform(infered)
|
||||
if not transformed or transformed is YES:
|
||||
raise UseInferenceDefault()
|
||||
return transformed
|
||||
|
||||
|
||||
def _generic_transform(arg, klass, iterables, build_elts):
|
||||
if isinstance(arg, klass):
|
||||
return arg
|
||||
elif isinstance(arg, iterables):
|
||||
if not all(isinstance(elt, nodes.Const)
|
||||
for elt in arg.elts):
|
||||
# TODO(cpopa): Don't support heterogenous elements.
|
||||
# Not yet, though.
|
||||
raise UseInferenceDefault()
|
||||
elts = [elt.value for elt in arg.elts]
|
||||
elif isinstance(arg, nodes.Dict):
|
||||
if not all(isinstance(elt[0], nodes.Const)
|
||||
for elt in arg.items):
|
||||
raise UseInferenceDefault()
|
||||
elts = [item[0].value for item in arg.items]
|
||||
elif (isinstance(arg, nodes.Const) and
|
||||
isinstance(arg.value, (six.string_types, six.binary_type))):
|
||||
elts = arg.value
|
||||
else:
|
||||
return
|
||||
return klass(elts=build_elts(elts))
|
||||
|
||||
|
||||
def _infer_builtin(node, context,
|
||||
klass=None, iterables=None,
|
||||
build_elts=None):
|
||||
transform_func = partial(
|
||||
_generic_transform,
|
||||
klass=klass,
|
||||
iterables=iterables,
|
||||
build_elts=build_elts)
|
||||
|
||||
return _generic_inference(node, context, klass, transform_func)
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
infer_tuple = partial(
|
||||
_infer_builtin,
|
||||
klass=nodes.Tuple,
|
||||
iterables=(nodes.List, nodes.Set),
|
||||
build_elts=tuple)
|
||||
|
||||
infer_list = partial(
|
||||
_infer_builtin,
|
||||
klass=nodes.List,
|
||||
iterables=(nodes.Tuple, nodes.Set),
|
||||
build_elts=list)
|
||||
|
||||
infer_set = partial(
|
||||
_infer_builtin,
|
||||
klass=nodes.Set,
|
||||
iterables=(nodes.List, nodes.Tuple),
|
||||
build_elts=set)
|
||||
|
||||
|
||||
def _get_elts(arg, context):
|
||||
is_iterable = lambda n: isinstance(n,
|
||||
(nodes.List, nodes.Tuple, nodes.Set))
|
||||
try:
|
||||
infered = next(arg.infer(context))
|
||||
except (InferenceError, UnresolvableName):
|
||||
raise UseInferenceDefault()
|
||||
if isinstance(infered, nodes.Dict):
|
||||
items = infered.items
|
||||
elif is_iterable(infered):
|
||||
items = []
|
||||
for elt in infered.elts:
|
||||
# If an item is not a pair of two items,
|
||||
# then fallback to the default inference.
|
||||
# Also, take in consideration only hashable items,
|
||||
# tuples and consts. We are choosing Names as well.
|
||||
if not is_iterable(elt):
|
||||
raise UseInferenceDefault()
|
||||
if len(elt.elts) != 2:
|
||||
raise UseInferenceDefault()
|
||||
if not isinstance(elt.elts[0],
|
||||
(nodes.Tuple, nodes.Const, nodes.Name)):
|
||||
raise UseInferenceDefault()
|
||||
items.append(tuple(elt.elts))
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
return items
|
||||
|
||||
def infer_dict(node, context=None):
|
||||
"""Try to infer a dict call to a Dict node.
|
||||
|
||||
The function treats the following cases:
|
||||
|
||||
* dict()
|
||||
* dict(mapping)
|
||||
* dict(iterable)
|
||||
* dict(iterable, **kwargs)
|
||||
* dict(mapping, **kwargs)
|
||||
* dict(**kwargs)
|
||||
|
||||
If a case can't be infered, we'll fallback to default inference.
|
||||
"""
|
||||
has_keywords = lambda args: all(isinstance(arg, nodes.Keyword)
|
||||
for arg in args)
|
||||
if not node.args and not node.kwargs:
|
||||
# dict()
|
||||
return nodes.Dict()
|
||||
elif has_keywords(node.args) and node.args:
|
||||
# dict(a=1, b=2, c=4)
|
||||
items = [(nodes.Const(arg.arg), arg.value) for arg in node.args]
|
||||
elif (len(node.args) >= 2 and
|
||||
has_keywords(node.args[1:])):
|
||||
# dict(some_iterable, b=2, c=4)
|
||||
elts = _get_elts(node.args[0], context)
|
||||
keys = [(nodes.Const(arg.arg), arg.value) for arg in node.args[1:]]
|
||||
items = elts + keys
|
||||
elif len(node.args) == 1:
|
||||
items = _get_elts(node.args[0], context)
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
empty = nodes.Dict()
|
||||
empty.items = items
|
||||
return empty
|
||||
|
||||
# Builtins inference
|
||||
register_builtin_transform(infer_tuple, 'tuple')
|
||||
register_builtin_transform(infer_set, 'set')
|
||||
register_builtin_transform(infer_list, 'list')
|
||||
register_builtin_transform(infer_dict, 'dict')
|
||||
155
plugins/bundle/python-mode/pymode/libs/astroid/brain/py2gi.py
Normal file
155
plugins/bundle/python-mode/pymode/libs/astroid/brain/py2gi.py
Normal file
|
|
@ -0,0 +1,155 @@
|
|||
"""Astroid hooks for the Python 2 GObject introspection bindings.
|
||||
|
||||
Helps with understanding everything imported from 'gi.repository'
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import itertools
|
||||
import sys
|
||||
import re
|
||||
|
||||
from astroid import MANAGER, AstroidBuildingException
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
_inspected_modules = {}
|
||||
|
||||
_identifier_re = r'^[A-Za-z_]\w*$'
|
||||
|
||||
def _gi_build_stub(parent):
|
||||
"""
|
||||
Inspect the passed module recursively and build stubs for functions,
|
||||
classes, etc.
|
||||
"""
|
||||
classes = {}
|
||||
functions = {}
|
||||
constants = {}
|
||||
methods = {}
|
||||
for name in dir(parent):
|
||||
if name.startswith("__"):
|
||||
continue
|
||||
|
||||
# Check if this is a valid name in python
|
||||
if not re.match(_identifier_re, name):
|
||||
continue
|
||||
|
||||
try:
|
||||
obj = getattr(parent, name)
|
||||
except:
|
||||
continue
|
||||
|
||||
if inspect.isclass(obj):
|
||||
classes[name] = obj
|
||||
elif (inspect.isfunction(obj) or
|
||||
inspect.isbuiltin(obj)):
|
||||
functions[name] = obj
|
||||
elif (inspect.ismethod(obj) or
|
||||
inspect.ismethoddescriptor(obj)):
|
||||
methods[name] = obj
|
||||
elif type(obj) in [int, str]:
|
||||
constants[name] = obj
|
||||
elif (str(obj).startswith("<flags") or
|
||||
str(obj).startswith("<enum ") or
|
||||
str(obj).startswith("<GType ") or
|
||||
inspect.isdatadescriptor(obj)):
|
||||
constants[name] = 0
|
||||
elif callable(obj):
|
||||
# Fall back to a function for anything callable
|
||||
functions[name] = obj
|
||||
else:
|
||||
# Assume everything else is some manner of constant
|
||||
constants[name] = 0
|
||||
|
||||
ret = ""
|
||||
|
||||
if constants:
|
||||
ret += "# %s contants\n\n" % parent.__name__
|
||||
for name in sorted(constants):
|
||||
if name[0].isdigit():
|
||||
# GDK has some busted constant names like
|
||||
# Gdk.EventType.2BUTTON_PRESS
|
||||
continue
|
||||
|
||||
val = constants[name]
|
||||
|
||||
strval = str(val)
|
||||
if type(val) is str:
|
||||
strval = '"%s"' % str(val).replace("\\", "\\\\")
|
||||
ret += "%s = %s\n" % (name, strval)
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if functions:
|
||||
ret += "# %s functions\n\n" % parent.__name__
|
||||
for name in sorted(functions):
|
||||
func = functions[name]
|
||||
ret += "def %s(*args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if methods:
|
||||
ret += "# %s methods\n\n" % parent.__name__
|
||||
for name in sorted(methods):
|
||||
func = methods[name]
|
||||
ret += "def %s(self, *args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if classes:
|
||||
ret += "# %s classes\n\n" % parent.__name__
|
||||
for name in sorted(classes):
|
||||
ret += "class %s(object):\n" % name
|
||||
|
||||
classret = _gi_build_stub(classes[name])
|
||||
if not classret:
|
||||
classret = "pass\n"
|
||||
|
||||
for line in classret.splitlines():
|
||||
ret += " " + line + "\n"
|
||||
ret += "\n"
|
||||
|
||||
return ret
|
||||
|
||||
def _import_gi_module(modname):
|
||||
# we only consider gi.repository submodules
|
||||
if not modname.startswith('gi.repository.'):
|
||||
raise AstroidBuildingException()
|
||||
# build astroid representation unless we already tried so
|
||||
if modname not in _inspected_modules:
|
||||
modnames = [modname]
|
||||
optional_modnames = []
|
||||
|
||||
# GLib and GObject may have some special case handling
|
||||
# in pygobject that we need to cope with. However at
|
||||
# least as of pygobject3-3.13.91 the _glib module doesn't
|
||||
# exist anymore, so if treat these modules as optional.
|
||||
if modname == 'gi.repository.GLib':
|
||||
optional_modnames.append('gi._glib')
|
||||
elif modname == 'gi.repository.GObject':
|
||||
optional_modnames.append('gi._gobject')
|
||||
|
||||
try:
|
||||
modcode = ''
|
||||
for m in itertools.chain(modnames, optional_modnames):
|
||||
try:
|
||||
__import__(m)
|
||||
modcode += _gi_build_stub(sys.modules[m])
|
||||
except ImportError:
|
||||
if m not in optional_modnames:
|
||||
raise
|
||||
except ImportError:
|
||||
astng = _inspected_modules[modname] = None
|
||||
else:
|
||||
astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
|
||||
_inspected_modules[modname] = astng
|
||||
else:
|
||||
astng = _inspected_modules[modname]
|
||||
if astng is None:
|
||||
raise AstroidBuildingException('Failed to import module %r' % modname)
|
||||
return astng
|
||||
|
||||
|
||||
MANAGER.register_failed_import_hook(_import_gi_module)
|
||||
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
def mechanize_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
class Browser(object):
|
||||
def open(self, url, data=None, timeout=None):
|
||||
return None
|
||||
def open_novisit(self, url, data=None, timeout=None):
|
||||
return None
|
||||
def open_local_file(self, filename):
|
||||
return None
|
||||
|
||||
''')
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'mechanize', mechanize_transform)
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
"""Astroid hooks for pytest."""
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def pytest_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
try:
|
||||
import _pytest.mark
|
||||
import _pytest.recwarn
|
||||
import _pytest.runner
|
||||
import _pytest.python
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
deprecated_call = _pytest.recwarn.deprecated_call
|
||||
exit = _pytest.runner.exit
|
||||
fail = _pytest.runner.fail
|
||||
fixture = _pytest.python.fixture
|
||||
importorskip = _pytest.runner.importorskip
|
||||
mark = _pytest.mark.MarkGenerator()
|
||||
raises = _pytest.python.raises
|
||||
skip = _pytest.runner.skip
|
||||
yield_fixture = _pytest.python.yield_fixture
|
||||
|
||||
''')
|
||||
|
||||
register_module_extender(MANAGER, 'pytest', pytest_transform)
|
||||
register_module_extender(MANAGER, 'py.test', pytest_transform)
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
"""Astroid hooks for the Python 2 qt4 module.
|
||||
|
||||
Currently help understanding of :
|
||||
|
||||
* PyQT4.QtCore
|
||||
"""
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def pyqt4_qtcore_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
def SIGNAL(signal_name): pass
|
||||
|
||||
class QObject(object):
|
||||
def emit(self, signal): pass
|
||||
''')
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform)
|
||||
|
|
@ -0,0 +1,334 @@
|
|||
|
||||
"""Astroid hooks for the Python 2 standard library.
|
||||
|
||||
Currently help understanding of :
|
||||
|
||||
* hashlib.md5 and hashlib.sha1
|
||||
"""
|
||||
|
||||
import sys
|
||||
from functools import partial
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import (
|
||||
MANAGER, AsStringRegexpPredicate,
|
||||
UseInferenceDefault, inference_tip,
|
||||
YES, InferenceError, register_module_extender)
|
||||
from astroid import exceptions
|
||||
from astroid import nodes
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
PY3K = sys.version_info > (3, 0)
|
||||
PY33 = sys.version_info >= (3, 3)
|
||||
|
||||
# general function
|
||||
|
||||
def infer_func_form(node, base_type, context=None, enum=False):
|
||||
"""Specific inference function for namedtuple or Python 3 enum. """
|
||||
def infer_first(node):
|
||||
try:
|
||||
value = next(node.infer(context=context))
|
||||
if value is YES:
|
||||
raise UseInferenceDefault()
|
||||
else:
|
||||
return value
|
||||
except StopIteration:
|
||||
raise InferenceError()
|
||||
|
||||
# node is a CallFunc node, class name as first argument and generated class
|
||||
# attributes as second argument
|
||||
if len(node.args) != 2:
|
||||
# something weird here, go back to class implementation
|
||||
raise UseInferenceDefault()
|
||||
# namedtuple or enums list of attributes can be a list of strings or a
|
||||
# whitespace-separate string
|
||||
try:
|
||||
name = infer_first(node.args[0]).value
|
||||
names = infer_first(node.args[1])
|
||||
try:
|
||||
attributes = names.value.replace(',', ' ').split()
|
||||
except AttributeError:
|
||||
if not enum:
|
||||
attributes = [infer_first(const).value for const in names.elts]
|
||||
else:
|
||||
# Enums supports either iterator of (name, value) pairs
|
||||
# or mappings.
|
||||
# TODO: support only list, tuples and mappings.
|
||||
if hasattr(names, 'items') and isinstance(names.items, list):
|
||||
attributes = [infer_first(const[0]).value
|
||||
for const in names.items
|
||||
if isinstance(const[0], nodes.Const)]
|
||||
elif hasattr(names, 'elts'):
|
||||
# Enums can support either ["a", "b", "c"]
|
||||
# or [("a", 1), ("b", 2), ...], but they can't
|
||||
# be mixed.
|
||||
if all(isinstance(const, nodes.Tuple)
|
||||
for const in names.elts):
|
||||
attributes = [infer_first(const.elts[0]).value
|
||||
for const in names.elts
|
||||
if isinstance(const, nodes.Tuple)]
|
||||
else:
|
||||
attributes = [infer_first(const).value
|
||||
for const in names.elts]
|
||||
else:
|
||||
raise AttributeError
|
||||
if not attributes:
|
||||
raise AttributeError
|
||||
except (AttributeError, exceptions.InferenceError) as exc:
|
||||
raise UseInferenceDefault()
|
||||
# we want to return a Class node instance with proper attributes set
|
||||
class_node = nodes.Class(name, 'docstring')
|
||||
class_node.parent = node.parent
|
||||
# set base class=tuple
|
||||
class_node.bases.append(base_type)
|
||||
# XXX add __init__(*attributes) method
|
||||
for attr in attributes:
|
||||
fake_node = nodes.EmptyNode()
|
||||
fake_node.parent = class_node
|
||||
class_node.instance_attrs[attr] = [fake_node]
|
||||
return class_node, name, attributes
|
||||
|
||||
|
||||
# module specific transformation functions #####################################
|
||||
|
||||
def hashlib_transform():
|
||||
template = '''
|
||||
|
||||
class %(name)s(object):
|
||||
def __init__(self, value=''): pass
|
||||
def digest(self):
|
||||
return %(digest)s
|
||||
def copy(self):
|
||||
return self
|
||||
def update(self, value): pass
|
||||
def hexdigest(self):
|
||||
return ''
|
||||
@property
|
||||
def name(self):
|
||||
return %(name)r
|
||||
@property
|
||||
def block_size(self):
|
||||
return 1
|
||||
@property
|
||||
def digest_size(self):
|
||||
return 1
|
||||
'''
|
||||
algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
|
||||
classes = "".join(
|
||||
template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'}
|
||||
for hashfunc in algorithms)
|
||||
return AstroidBuilder(MANAGER).string_build(classes)
|
||||
|
||||
|
||||
def collections_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
class defaultdict(dict):
|
||||
default_factory = None
|
||||
def __missing__(self, key): pass
|
||||
|
||||
class deque(object):
|
||||
maxlen = 0
|
||||
def __init__(self, iterable=None, maxlen=None): pass
|
||||
def append(self, x): pass
|
||||
def appendleft(self, x): pass
|
||||
def clear(self): pass
|
||||
def count(self, x): return 0
|
||||
def extend(self, iterable): pass
|
||||
def extendleft(self, iterable): pass
|
||||
def pop(self): pass
|
||||
def popleft(self): pass
|
||||
def remove(self, value): pass
|
||||
def reverse(self): pass
|
||||
def rotate(self, n): pass
|
||||
def __iter__(self): return self
|
||||
|
||||
''')
|
||||
|
||||
|
||||
def pkg_resources_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
def resource_exists(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_isdir(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_filename(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_stream(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_string(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_listdir(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def extraction_error():
|
||||
pass
|
||||
|
||||
def get_cache_path(archive_name, names=()):
|
||||
pass
|
||||
|
||||
def postprocess(tempname, filename):
|
||||
pass
|
||||
|
||||
def set_extraction_path(path):
|
||||
pass
|
||||
|
||||
def cleanup_resources(force=False):
|
||||
pass
|
||||
|
||||
''')
|
||||
|
||||
|
||||
def subprocess_transform():
|
||||
if PY3K:
|
||||
communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0, restore_signals=True,
|
||||
start_new_session=False, pass_fds=()):
|
||||
pass
|
||||
"""
|
||||
else:
|
||||
communicate = ('string', 'string')
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0):
|
||||
pass
|
||||
"""
|
||||
if PY33:
|
||||
wait_signature = 'def wait(self, timeout=None)'
|
||||
else:
|
||||
wait_signature = 'def wait(self)'
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
class Popen(object):
|
||||
returncode = pid = 0
|
||||
stdin = stdout = stderr = file()
|
||||
|
||||
%(init)s
|
||||
|
||||
def communicate(self, input=None):
|
||||
return %(communicate)r
|
||||
%(wait_signature)s:
|
||||
return self.returncode
|
||||
def poll(self):
|
||||
return self.returncode
|
||||
def send_signal(self, signal):
|
||||
pass
|
||||
def terminate(self):
|
||||
pass
|
||||
def kill(self):
|
||||
pass
|
||||
''' % {'init': init,
|
||||
'communicate': communicate,
|
||||
'wait_signature': wait_signature})
|
||||
|
||||
|
||||
# namedtuple support ###########################################################
|
||||
|
||||
def looks_like_namedtuple(node):
|
||||
func = node.func
|
||||
if type(func) is nodes.Getattr:
|
||||
return func.attrname == 'namedtuple'
|
||||
if type(func) is nodes.Name:
|
||||
return func.name == 'namedtuple'
|
||||
return False
|
||||
|
||||
def infer_named_tuple(node, context=None):
|
||||
"""Specific inference function for namedtuple CallFunc node"""
|
||||
class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
|
||||
context=context)
|
||||
fake = AstroidBuilder(MANAGER).string_build('''
|
||||
class %(name)s(tuple):
|
||||
_fields = %(fields)r
|
||||
def _asdict(self):
|
||||
return self.__dict__
|
||||
@classmethod
|
||||
def _make(cls, iterable, new=tuple.__new__, len=len):
|
||||
return new(cls, iterable)
|
||||
def _replace(_self, **kwds):
|
||||
result = _self._make(map(kwds.pop, %(fields)r, _self))
|
||||
if kwds:
|
||||
raise ValueError('Got unexpected field names: %%r' %% list(kwds))
|
||||
return result
|
||||
''' % {'name': name, 'fields': attributes})
|
||||
class_node.locals['_asdict'] = fake.body[0].locals['_asdict']
|
||||
class_node.locals['_make'] = fake.body[0].locals['_make']
|
||||
class_node.locals['_replace'] = fake.body[0].locals['_replace']
|
||||
class_node.locals['_fields'] = fake.body[0].locals['_fields']
|
||||
# we use UseInferenceDefault, we can't be a generator so return an iterator
|
||||
return iter([class_node])
|
||||
|
||||
def infer_enum(node, context=None):
|
||||
""" Specific inference function for enum CallFunc node. """
|
||||
enum_meta = nodes.Class("EnumMeta", 'docstring')
|
||||
class_node = infer_func_form(node, enum_meta,
|
||||
context=context, enum=True)[0]
|
||||
return iter([class_node.instanciate_class()])
|
||||
|
||||
def infer_enum_class(node):
|
||||
""" Specific inference for enums. """
|
||||
names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
|
||||
for basename in node.basenames:
|
||||
# TODO: doesn't handle subclasses yet. This implementation
|
||||
# is a hack to support enums.
|
||||
if basename not in names:
|
||||
continue
|
||||
if node.root().name == 'enum':
|
||||
# Skip if the class is directly from enum module.
|
||||
break
|
||||
for local, values in node.locals.items():
|
||||
if any(not isinstance(value, nodes.AssName)
|
||||
for value in values):
|
||||
continue
|
||||
|
||||
stmt = values[0].statement()
|
||||
if isinstance(stmt.targets[0], nodes.Tuple):
|
||||
targets = stmt.targets[0].itered()
|
||||
else:
|
||||
targets = stmt.targets
|
||||
|
||||
new_targets = []
|
||||
for target in targets:
|
||||
# Replace all the assignments with our mocked class.
|
||||
classdef = dedent('''
|
||||
class %(name)s(object):
|
||||
@property
|
||||
def value(self):
|
||||
# Not the best return.
|
||||
return None
|
||||
@property
|
||||
def name(self):
|
||||
return %(name)r
|
||||
''' % {'name': target.name})
|
||||
fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
|
||||
fake.parent = target.parent
|
||||
for method in node.mymethods():
|
||||
fake.locals[method.name] = [method]
|
||||
new_targets.append(fake.instanciate_class())
|
||||
node.locals[local] = new_targets
|
||||
break
|
||||
return node
|
||||
|
||||
|
||||
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
|
||||
looks_like_namedtuple)
|
||||
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum),
|
||||
AsStringRegexpPredicate('Enum', 'func'))
|
||||
MANAGER.register_transform(nodes.Class, infer_enum_class)
|
||||
register_module_extender(MANAGER, 'hashlib', hashlib_transform)
|
||||
register_module_extender(MANAGER, 'collections', collections_transform)
|
||||
register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform)
|
||||
register_module_extender(MANAGER, 'subprocess', subprocess_transform)
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""Hooks for nose library."""
|
||||
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
import astroid
|
||||
import astroid.builder
|
||||
|
||||
_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER)
|
||||
|
||||
|
||||
def _pep8(name, caps=re.compile('([A-Z])')):
|
||||
return caps.sub(lambda m: '_' + m.groups()[0].lower(), name)
|
||||
|
||||
|
||||
def _nose_tools_functions():
|
||||
"""Get an iterator of names and bound methods."""
|
||||
module = _BUILDER.string_build(textwrap.dedent('''
|
||||
import unittest
|
||||
|
||||
class Test(unittest.TestCase):
|
||||
pass
|
||||
a = Test()
|
||||
'''))
|
||||
try:
|
||||
case = next(module['a'].infer())
|
||||
except astroid.InferenceError:
|
||||
return
|
||||
for method in case.methods():
|
||||
if method.name.startswith('assert') and '_' not in method.name:
|
||||
pep8_name = _pep8(method.name)
|
||||
yield pep8_name, astroid.BoundMethod(method, case)
|
||||
|
||||
|
||||
def _nose_tools_transform(node):
|
||||
for method_name, method in _nose_tools_functions():
|
||||
node.locals[method_name] = [method]
|
||||
|
||||
|
||||
def _nose_tools_trivial_transform():
|
||||
"""Custom transform for the nose.tools module."""
|
||||
stub = _BUILDER.string_build('''__all__ = []''')
|
||||
all_entries = ['ok_', 'eq_']
|
||||
|
||||
for pep8_name, method in _nose_tools_functions():
|
||||
all_entries.append(pep8_name)
|
||||
stub[pep8_name] = method
|
||||
|
||||
# Update the __all__ variable, since nose.tools
|
||||
# does this manually with .append.
|
||||
all_assign = stub['__all__'].parent
|
||||
all_object = astroid.List(all_entries)
|
||||
all_object.parent = all_assign
|
||||
all_assign.value = all_object
|
||||
return stub
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'nose.tools.trivial',
|
||||
_nose_tools_trivial_transform)
|
||||
astroid.MANAGER.register_transform(astroid.Module, _nose_tools_transform,
|
||||
lambda n: n.name == 'nose.tools')
|
||||
|
|
@ -0,0 +1,261 @@
|
|||
# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""Astroid hooks for six.moves."""
|
||||
|
||||
import sys
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.exceptions import AstroidBuildingException
|
||||
|
||||
def _indent(text, prefix, predicate=None):
|
||||
"""Adds 'prefix' to the beginning of selected lines in 'text'.
|
||||
|
||||
If 'predicate' is provided, 'prefix' will only be added to the lines
|
||||
where 'predicate(line)' is True. If 'predicate' is not provided,
|
||||
it will default to adding 'prefix' to all non-empty lines that do not
|
||||
consist solely of whitespace characters.
|
||||
"""
|
||||
if predicate is None:
|
||||
predicate = lambda line: line.strip()
|
||||
|
||||
def prefixed_lines():
|
||||
for line in text.splitlines(True):
|
||||
yield prefix + line if predicate(line) else line
|
||||
return ''.join(prefixed_lines())
|
||||
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
_IMPORTS_2 = """
|
||||
import BaseHTTPServer
|
||||
import CGIHTTPServer
|
||||
import SimpleHTTPServer
|
||||
|
||||
from StringIO import StringIO
|
||||
from cStringIO import StringIO as cStringIO
|
||||
from UserDict import UserDict
|
||||
from UserList import UserList
|
||||
from UserString import UserString
|
||||
|
||||
import __builtin__ as builtins
|
||||
import thread as _thread
|
||||
import dummy_thread as _dummy_thread
|
||||
import ConfigParser as configparser
|
||||
import copy_reg as copyreg
|
||||
from itertools import (imap as map,
|
||||
ifilter as filter,
|
||||
ifilterfalse as filterfalse,
|
||||
izip_longest as zip_longest,
|
||||
izip as zip)
|
||||
import htmlentitydefs as html_entities
|
||||
import HTMLParser as html_parser
|
||||
import httplib as http_client
|
||||
import cookielib as http_cookiejar
|
||||
import Cookie as http_cookies
|
||||
import Queue as queue
|
||||
import repr as reprlib
|
||||
from pipes import quote as shlex_quote
|
||||
import SocketServer as socketserver
|
||||
import SimpleXMLRPCServer as xmlrpc_server
|
||||
import xmlrpclib as xmlrpc_client
|
||||
import _winreg as winreg
|
||||
import robotparser as urllib_robotparser
|
||||
import Tkinter as tkinter
|
||||
import tkFileDialog as tkinter_tkfiledialog
|
||||
|
||||
input = raw_input
|
||||
intern = intern
|
||||
range = xrange
|
||||
xrange = xrange
|
||||
reduce = reduce
|
||||
reload_module = reload
|
||||
|
||||
class UrllibParse(object):
|
||||
import urlparse as _urlparse
|
||||
import urllib as _urllib
|
||||
ParseResult = _urlparse.ParseResult
|
||||
SplitResult = _urlparse.SplitResult
|
||||
parse_qs = _urlparse.parse_qs
|
||||
parse_qsl = _urlparse.parse_qsl
|
||||
urldefrag = _urlparse.urldefrag
|
||||
urljoin = _urlparse.urljoin
|
||||
urlparse = _urlparse.urlparse
|
||||
urlsplit = _urlparse.urlsplit
|
||||
urlunparse = _urlparse.urlunparse
|
||||
urlunsplit = _urlparse.urlunsplit
|
||||
quote = _urllib.quote
|
||||
quote_plus = _urllib.quote_plus
|
||||
unquote = _urllib.unquote
|
||||
unquote_plus = _urllib.unquote_plus
|
||||
urlencode = _urllib.urlencode
|
||||
splitquery = _urllib.splitquery
|
||||
splittag = _urllib.splittag
|
||||
splituser = _urllib.splituser
|
||||
uses_fragment = _urlparse.uses_fragment
|
||||
uses_netloc = _urlparse.uses_netloc
|
||||
uses_params = _urlparse.uses_params
|
||||
uses_query = _urlparse.uses_query
|
||||
uses_relative = _urlparse.uses_relative
|
||||
|
||||
class UrllibError(object):
|
||||
import urllib2 as _urllib2
|
||||
import urllib as _urllib
|
||||
URLError = _urllib2.URLError
|
||||
HTTPError = _urllib2.HTTPError
|
||||
ContentTooShortError = _urllib.ContentTooShortError
|
||||
|
||||
class DummyModule(object):
|
||||
pass
|
||||
|
||||
class UrllibRequest(object):
|
||||
import urlparse as _urlparse
|
||||
import urllib2 as _urllib2
|
||||
import urllib as _urllib
|
||||
urlopen = _urllib2.urlopen
|
||||
install_opener = _urllib2.install_opener
|
||||
build_opener = _urllib2.build_opener
|
||||
pathname2url = _urllib.pathname2url
|
||||
url2pathname = _urllib.url2pathname
|
||||
getproxies = _urllib.getproxies
|
||||
Request = _urllib2.Request
|
||||
OpenerDirector = _urllib2.OpenerDirector
|
||||
HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler
|
||||
HTTPRedirectHandler = _urllib2.HTTPRedirectHandler
|
||||
HTTPCookieProcessor = _urllib2.HTTPCookieProcessor
|
||||
ProxyHandler = _urllib2.ProxyHandler
|
||||
BaseHandler = _urllib2.BaseHandler
|
||||
HTTPPasswordMgr = _urllib2.HTTPPasswordMgr
|
||||
HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm
|
||||
AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler
|
||||
HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler
|
||||
ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler
|
||||
AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler
|
||||
HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler
|
||||
ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler
|
||||
HTTPHandler = _urllib2.HTTPHandler
|
||||
HTTPSHandler = _urllib2.HTTPSHandler
|
||||
FileHandler = _urllib2.FileHandler
|
||||
FTPHandler = _urllib2.FTPHandler
|
||||
CacheFTPHandler = _urllib2.CacheFTPHandler
|
||||
UnknownHandler = _urllib2.UnknownHandler
|
||||
HTTPErrorProcessor = _urllib2.HTTPErrorProcessor
|
||||
urlretrieve = _urllib.urlretrieve
|
||||
urlcleanup = _urllib.urlcleanup
|
||||
proxy_bypass = _urllib.proxy_bypass
|
||||
|
||||
urllib_parse = UrllibParse()
|
||||
urllib_error = UrllibError()
|
||||
urllib = DummyModule()
|
||||
urllib.request = UrllibRequest()
|
||||
urllib.parse = UrllibParse()
|
||||
urllib.error = UrllibError()
|
||||
"""
|
||||
else:
|
||||
_IMPORTS_3 = """
|
||||
import _io
|
||||
cStringIO = _io.StringIO
|
||||
filter = filter
|
||||
from itertools import filterfalse
|
||||
input = input
|
||||
from sys import intern
|
||||
map = map
|
||||
range = range
|
||||
from imp import reload as reload_module
|
||||
from functools import reduce
|
||||
from shlex import quote as shlex_quote
|
||||
from io import StringIO
|
||||
from collections import UserDict, UserList, UserString
|
||||
xrange = range
|
||||
zip = zip
|
||||
from itertools import zip_longest
|
||||
import builtins
|
||||
import configparser
|
||||
import copyreg
|
||||
import _dummy_thread
|
||||
import http.cookiejar as http_cookiejar
|
||||
import http.cookies as http_cookies
|
||||
import html.entities as html_entities
|
||||
import html.parser as html_parser
|
||||
import http.client as http_client
|
||||
import http.server
|
||||
BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
|
||||
import pickle as cPickle
|
||||
import queue
|
||||
import reprlib
|
||||
import socketserver
|
||||
import _thread
|
||||
import winreg
|
||||
import xmlrpc.server as xmlrpc_server
|
||||
import xmlrpc.client as xmlrpc_client
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import email.mime.multipart as email_mime_multipart
|
||||
import email.mime.nonmultipart as email_mime_nonmultipart
|
||||
import email.mime.text as email_mime_text
|
||||
import email.mime.base as email_mime_base
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
import tkinter
|
||||
import tkinter.dialog as tkinter_dialog
|
||||
import tkinter.filedialog as tkinter_filedialog
|
||||
import tkinter.scrolledtext as tkinter_scrolledtext
|
||||
import tkinter.simpledialog as tkinder_simpledialog
|
||||
import tkinter.tix as tkinter_tix
|
||||
import tkinter.ttk as tkinter_ttk
|
||||
import tkinter.constants as tkinter_constants
|
||||
import tkinter.dnd as tkinter_dnd
|
||||
import tkinter.colorchooser as tkinter_colorchooser
|
||||
import tkinter.commondialog as tkinter_commondialog
|
||||
import tkinter.filedialog as tkinter_tkfiledialog
|
||||
import tkinter.font as tkinter_font
|
||||
import tkinter.messagebox as tkinter_messagebox
|
||||
import urllib.request
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
"""
|
||||
if sys.version_info[0] == 2:
|
||||
_IMPORTS = dedent(_IMPORTS_2)
|
||||
else:
|
||||
_IMPORTS = dedent(_IMPORTS_3)
|
||||
|
||||
|
||||
def six_moves_transform():
|
||||
code = dedent('''
|
||||
class Moves(object):
|
||||
{}
|
||||
moves = Moves()
|
||||
''').format(_indent(_IMPORTS, " "))
|
||||
module = AstroidBuilder(MANAGER).string_build(code)
|
||||
module.name = 'six.moves'
|
||||
return module
|
||||
|
||||
|
||||
def _six_fail_hook(modname):
|
||||
if modname != 'six.moves':
|
||||
raise AstroidBuildingException
|
||||
module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
|
||||
module.name = 'six.moves'
|
||||
return module
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'six', six_moves_transform)
|
||||
register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six',
|
||||
six_moves_transform)
|
||||
MANAGER.register_failed_import_hook(_six_fail_hook)
|
||||
240
plugins/bundle/python-mode/pymode/libs/astroid/builder.py
Normal file
240
plugins/bundle/python-mode/pymode/libs/astroid/builder.py
Normal file
|
|
@ -0,0 +1,240 @@
|
|||
# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""The AstroidBuilder makes astroid from living object and / or from _ast
|
||||
|
||||
The builder is not thread safe and can't be used to parse different sources
|
||||
at the same time.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
from os.path import splitext, basename, exists, abspath
|
||||
|
||||
from astroid.exceptions import AstroidBuildingException, InferenceError
|
||||
from astroid.raw_building import InspectBuilder
|
||||
from astroid.rebuilder import TreeRebuilder
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.bases import YES, Instance
|
||||
from astroid.modutils import modpath_from_file
|
||||
|
||||
from _ast import PyCF_ONLY_AST
|
||||
def parse(string):
|
||||
return compile(string, "<string>", 'exec', PyCF_ONLY_AST)
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
from tokenize import detect_encoding
|
||||
|
||||
def open_source_file(filename):
|
||||
with open(filename, 'rb') as byte_stream:
|
||||
encoding = detect_encoding(byte_stream.readline)[0]
|
||||
stream = open(filename, 'r', newline=None, encoding=encoding)
|
||||
try:
|
||||
data = stream.read()
|
||||
except UnicodeError: # wrong encodingg
|
||||
# detect_encoding returns utf-8 if no encoding specified
|
||||
msg = 'Wrong (%s) or no encoding specified' % encoding
|
||||
raise AstroidBuildingException(msg)
|
||||
return stream, encoding, data
|
||||
|
||||
else:
|
||||
import re
|
||||
|
||||
_ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)")
|
||||
|
||||
def _guess_encoding(string):
|
||||
"""get encoding from a python file as string or return None if not found
|
||||
"""
|
||||
# check for UTF-8 byte-order mark
|
||||
if string.startswith('\xef\xbb\xbf'):
|
||||
return 'UTF-8'
|
||||
for line in string.split('\n', 2)[:2]:
|
||||
# check for encoding declaration
|
||||
match = _ENCODING_RGX.match(line)
|
||||
if match is not None:
|
||||
return match.group(1)
|
||||
|
||||
def open_source_file(filename):
|
||||
"""get data for parsing a file"""
|
||||
stream = open(filename, 'U')
|
||||
data = stream.read()
|
||||
encoding = _guess_encoding(data)
|
||||
return stream, encoding, data
|
||||
|
||||
# ast NG builder ##############################################################
|
||||
|
||||
MANAGER = AstroidManager()
|
||||
|
||||
class AstroidBuilder(InspectBuilder):
    """provide astroid building methods

    Builds astroid module trees either from source files/strings (parsed
    through the compiler) or, as a fallback, by runtime introspection of a
    living module object (inherited from ``InspectBuilder``).
    """

    def __init__(self, manager=None):
        InspectBuilder.__init__(self)
        # fall back on the module-level singleton when no manager is given
        self._manager = manager or MANAGER

    def module_build(self, module, modname=None):
        """build an astroid from a living module instance
        """
        node = None
        path = getattr(module, '__file__', None)
        if path is not None:
            # prefer building from the source file when one is available
            path_, ext = splitext(module.__file__)
            if ext in ('.py', '.pyc', '.pyo') and exists(path_ + '.py'):
                node = self.file_build(path_ + '.py', modname)
        if node is None:
            # this is a built-in module
            # get a partial representation by introspection
            node = self.inspect_build(module, modname=modname, path=path)
            # we have to handle transformation by ourselves since the rebuilder
            # isn't called for builtin nodes
            #
            # XXX it's then only called for Module nodes, not for underlying
            # nodes
            node = self._manager.transform(node)
        return node

    def file_build(self, path, modname=None):
        """build astroid from a source code file (i.e. from an ast)

        path is expected to be a python source file
        """
        try:
            stream, encoding, data = open_source_file(path)
        except IOError as exc:
            msg = 'Unable to load file %r (%s)' % (path, exc)
            raise AstroidBuildingException(msg)
        except SyntaxError as exc: # py3k encoding specification error
            raise AstroidBuildingException(exc)
        except LookupError as exc: # unknown encoding
            raise AstroidBuildingException(exc)
        with stream:
            # get module name if necessary
            if modname is None:
                try:
                    modname = '.'.join(modpath_from_file(path))
                except ImportError:
                    # file is not importable from sys.path: fall back on
                    # its bare basename as module name
                    modname = splitext(basename(path))[0]
            # build astroid representation
            module = self._data_build(data, modname, path)
            return self._post_build(module, encoding)

    def string_build(self, data, modname='', path=None):
        """build astroid from source code string and return rebuilded astroid"""
        module = self._data_build(data, modname, path)
        module.file_bytes = data.encode('utf-8')
        return self._post_build(module, 'utf-8')

    def _post_build(self, module, encoding):
        """handles encoding and delayed nodes
        after a module has been built
        """
        module.file_encoding = encoding
        self._manager.cache_module(module)
        # post tree building steps after we stored the module in the cache:
        for from_node in module._from_nodes:
            if from_node.modname == '__future__':
                # record __future__ imports so checkers can query them
                for symbol, _ in from_node.names:
                    module.future_imports.add(symbol)
            self.add_from_names_to_locals(from_node)
        # handle delayed assattr nodes
        for delayed in module._delayed_assattr:
            self.delayed_assattr(delayed)
        return module

    def _data_build(self, data, modname, path):
        """build tree node from data and add some informations"""
        # this method could be wrapped with a pickle/cache function
        try:
            # trailing newline appended so a source without one still parses
            node = parse(data + '\n')
        except TypeError as exc:
            raise AstroidBuildingException(exc)
        if path is not None:
            node_file = abspath(path)
        else:
            node_file = '<?>'
        if modname.endswith('.__init__'):
            # strip the trailing '.__init__' (9 chars): the package itself
            modname = modname[:-9]
            package = True
        else:
            package = path and path.find('__init__.py') > -1 or False
        rebuilder = TreeRebuilder(self._manager)
        module = rebuilder.visit_module(node, modname, node_file, package)
        # stash the rebuilder's bookkeeping for _post_build()
        module._from_nodes = rebuilder._from_nodes
        module._delayed_assattr = rebuilder._delayed_assattr
        return module

    def add_from_names_to_locals(self, node):
        """store imported names to the locals;
        resort the locals if coming from a delayed node
        """

        _key_func = lambda node: node.fromlineno
        def sort_locals(my_list):
            # keep each locals entry ordered by source line number
            my_list.sort(key=_key_func)
        for (name, asname) in node.names:
            if name == '*':
                # wildcard import: expand to the imported module's names
                try:
                    imported = node.do_import_module()
                except InferenceError:
                    continue
                for name in imported.wildcard_import_names():
                    node.parent.set_local(name, node)
                    sort_locals(node.parent.scope().locals[name])
            else:
                node.parent.set_local(asname or name, node)
                sort_locals(node.parent.scope().locals[asname or name])

    def delayed_assattr(self, node):
        """visit a AssAttr node -> add name to locals, handle members
        definition
        """
        try:
            frame = node.frame()
            for infered in node.expr.infer():
                if infered is YES:
                    continue
                try:
                    if infered.__class__ is Instance:
                        # plain instance: record on the proxied class
                        infered = infered._proxied
                        iattrs = infered.instance_attrs
                    elif isinstance(infered, Instance):
                        # Const, Tuple, ... we may be wrong, may be not, but
                        # anyway we don't want to pollute builtin's namespace
                        continue
                    elif infered.is_function:
                        iattrs = infered.instance_attrs
                    else:
                        iattrs = infered.locals
                except AttributeError:
                    # XXX log error
                    #import traceback
                    #traceback.print_exc()
                    continue
                values = iattrs.setdefault(node.attrname, [])
                if node in values:
                    continue
                # get assign in __init__ first XXX useful ?
                if frame.name == '__init__' and values and not \
                       values[0].frame().name == '__init__':
                    values.insert(0, node)
                else:
                    values.append(node)
        except InferenceError:
            pass
|
||||
|
||||
51
plugins/bundle/python-mode/pymode/libs/astroid/exceptions.py
Normal file
51
plugins/bundle/python-mode/pymode/libs/astroid/exceptions.py
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains exceptions used in the astroid library
|
||||
|
||||
"""
|
||||
|
||||
__doctype__ = "restructuredtext en"
|
||||
|
||||
class AstroidError(Exception):
    """base exception class for all astroid related exceptions"""


class AstroidBuildingException(AstroidError):
    """exception class when we are unable to build an astroid representation"""


class ResolveError(AstroidError):
    """base class of astroid resolution/inference error"""


class NotFoundError(ResolveError):
    """raised when we are unable to resolve a name"""


class InferenceError(ResolveError):
    """raised when we are unable to infer a node"""


class UseInferenceDefault(Exception):
    """exception to be raised in custom inference function to indicate that it
    should go back to the default behaviour

    NOTE: deliberately NOT derived from AstroidError, so inference code that
    catches the astroid hierarchy lets it propagate to the dispatcher.
    """


class UnresolvableName(InferenceError):
    """raised when we are unable to resolve a name"""


class NoDefault(AstroidError):
    """raised by function's `default_value` method when an argument has
    no default value
    """
|
||||
|
||||
405
plugins/bundle/python-mode/pymode/libs/astroid/inference.py
Normal file
405
plugins/bundle/python-mode/pymode/libs/astroid/inference.py
Normal file
|
|
@ -0,0 +1,405 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains a set of functions to handle inference on astroid trees
|
||||
"""
|
||||
|
||||
__doctype__ = "restructuredtext en"
|
||||
|
||||
from itertools import chain
|
||||
|
||||
from astroid import nodes
|
||||
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.exceptions import (AstroidError, InferenceError, NoDefault,
|
||||
NotFoundError, UnresolvableName)
|
||||
from astroid.bases import (YES, Instance, InferenceContext,
|
||||
_infer_stmts, copy_context, path_wrapper,
|
||||
raise_if_nothing_infered)
|
||||
from astroid.protocols import (
|
||||
_arguments_infer_argname,
|
||||
BIN_OP_METHOD, UNARY_OP_METHOD)
|
||||
|
||||
MANAGER = AstroidManager()
|
||||
|
||||
|
||||
class CallContext(object):
    """when inferring a function call, this class is used to remember values
    given as argument
    """
    def __init__(self, args, starargs, dstarargs):
        # split positional arguments from keyword arguments up front
        self.args = []
        self.nargs = {}
        for arg in args:
            if isinstance(arg, nodes.Keyword):
                self.nargs[arg.arg] = arg.value
            else:
                self.args.append(arg)
        self.starargs = starargs      # the *args node, if any
        self.dstarargs = dstarargs    # the **kwargs node, if any

    def infer_argument(self, funcnode, name, context):
        """infer a function argument value according to the call context

        Resolution order: explicit keyword, positional index (accounting for
        the implicit self/cls), *args, **kwargs, the vararg/kwarg parameters
        themselves, and finally the declared default value.
        """
        # 1. search in named keywords
        try:
            return self.nargs[name].infer(context)
        except KeyError:
            # Function.args.args can be None in astroid (means that we don't have
            # information on argnames)
            argindex = funcnode.args.find_argname(name)[0]
            if argindex is not None:
                # 2. first argument of instance/class method
                if argindex == 0 and funcnode.type in ('method', 'classmethod'):
                    if context.boundnode is not None:
                        boundnode = context.boundnode
                    else:
                        # XXX can do better ?
                        boundnode = funcnode.parent.frame()
                    if funcnode.type == 'method':
                        if not isinstance(boundnode, Instance):
                            boundnode = Instance(boundnode)
                        return iter((boundnode,))
                    if funcnode.type == 'classmethod':
                        return iter((boundnode,))
                # if we have a method, extract one position
                # from the index, so we'll take in account
                # the extra parameter represented by `self` or `cls`
                if funcnode.type in ('method', 'classmethod'):
                    argindex -= 1
                # 2. search arg index
                try:
                    return self.args[argindex].infer(context)
                except IndexError:
                    pass
                # 3. search in *args (.starargs)
                if self.starargs is not None:
                    its = []
                    for infered in self.starargs.infer(context):
                        if infered is YES:
                            its.append((YES,))
                            continue
                        try:
                            its.append(infered.getitem(argindex, context).infer(context))
                        except (InferenceError, AttributeError):
                            its.append((YES,))
                        except (IndexError, TypeError):
                            continue
                    if its:
                        return chain(*its)
            # 4. XXX search in **kwargs (.dstarargs)
            if self.dstarargs is not None:
                its = []
                for infered in self.dstarargs.infer(context):
                    if infered is YES:
                        its.append((YES,))
                        continue
                    try:
                        its.append(infered.getitem(name, context).infer(context))
                    except (InferenceError, AttributeError):
                        its.append((YES,))
                    except (IndexError, TypeError):
                        continue
                if its:
                    return chain(*its)
            # 5. */** argument, (Tuple or Dict)
            if name == funcnode.args.vararg:
                return iter((nodes.const_factory(())))
            if name == funcnode.args.kwarg:
                return iter((nodes.const_factory({})))
            # 6. return default value if any
            try:
                return funcnode.args.default_value(name).infer(context)
            except NoDefault:
                raise InferenceError(name)
|
||||
|
||||
|
||||
# .infer method ###############################################################
|
||||
|
||||
|
||||
def infer_end(self, context=None):
    """inference's end for node such as Module, Class, Function, Const...

    A leaf node is its own inferred value: yield it and stop.
    """
    yield self
# these node types all infer to themselves
nodes.Module._infer = infer_end
nodes.Class._infer = infer_end
nodes.Function._infer = infer_end
nodes.Lambda._infer = infer_end
nodes.Const._infer = infer_end
nodes.List._infer = infer_end
nodes.Tuple._infer = infer_end
nodes.Dict._infer = infer_end
nodes.Set._infer = infer_end
|
||||
|
||||
def _higher_function_scope(node):
    """Search for the first function which encloses the given scope.

    This can be used for looking up in that function's scope, in case
    looking up in a lower scope for a particular name fails.

    :param node: A scope node.
    :returns:
        ``None`` if no parent function scope was found, otherwise an
        instance of :class:`astroid.scoped_nodes.Function` enclosing
        the given node.
    """
    current = node
    while True:
        enclosing = current.parent
        if not enclosing or isinstance(enclosing, nodes.Function):
            break
        current = enclosing
    if current and current.parent:
        return current.parent
    return None
|
||||
|
||||
def infer_name(self, context=None):
    """infer a Name: use name lookup rules"""
    frame, stmts = self.lookup(self.name)
    if not stmts:
        # Try to see if the name is enclosed in a nested function
        # and use the higher (first function) scope for searching.
        # TODO: should this be promoted to other nodes as well?
        parent_function = _higher_function_scope(self.scope())
        if parent_function:
            _, stmts = parent_function.lookup(self.name)

        if not stmts:
            raise UnresolvableName(self.name)
    # clone so the lookup name doesn't leak into the caller's context
    context = context.clone()
    context.lookupname = self.name
    return _infer_stmts(stmts, context, frame)
nodes.Name._infer = path_wrapper(infer_name)
nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper
|
||||
|
||||
|
||||
def infer_callfunc(self, context=None):
    """infer a CallFunc node by trying to guess what the function returns"""
    # remember the concrete call arguments so the callee's parameters
    # can be resolved against them
    callcontext = context.clone()
    callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs)
    callcontext.boundnode = None
    for callee in self.func.infer(context):
        if callee is YES:
            yield callee
            continue
        try:
            if hasattr(callee, 'infer_call_result'):
                for infered in callee.infer_call_result(self, callcontext):
                    yield infered
        except InferenceError:
            ## XXX log error ?
            continue
nodes.CallFunc._infer = path_wrapper(raise_if_nothing_infered(infer_callfunc))
|
||||
|
||||
|
||||
def infer_import(self, context=None, asname=True):
    """infer an Import node: return the imported module/object"""
    name = context.lookupname
    if name is None:
        raise InferenceError()
    if asname:
        # translate the local alias back to the real module name
        yield self.do_import_module(self.real_name(name))
    else:
        yield self.do_import_module(name)
nodes.Import._infer = path_wrapper(infer_import)
|
||||
|
||||
def infer_name_module(self, name):
    # infer the module bound to *name* by this Import node (no alias
    # translation: the raw module name is looked up)
    context = InferenceContext()
    context.lookupname = name
    return self.infer(context, asname=False)
nodes.Import.infer_name_module = infer_name_module
|
||||
|
||||
|
||||
def infer_from(self, context=None, asname=True):
    """infer a From nodes: return the imported module/object"""
    name = context.lookupname
    if name is None:
        raise InferenceError()
    if asname:
        name = self.real_name(name)
    module = self.do_import_module()
    try:
        context = copy_context(context)
        context.lookupname = name
        # ignore_locals when importing from the module currently being
        # built, to avoid resolving the name to this very import statement
        return _infer_stmts(module.getattr(name, ignore_locals=module is self.root()), context)
    except NotFoundError:
        raise InferenceError(name)
nodes.From._infer = path_wrapper(infer_from)
|
||||
|
||||
|
||||
def infer_getattr(self, context=None):
    """infer a Getattr node by using getattr on the associated object"""
    for owner in self.expr.infer(context):
        if owner is YES:
            yield owner
            continue
        try:
            # record the owner so bound-method inference can find it
            context.boundnode = owner
            for obj in owner.igetattr(self.attrname, context):
                yield obj
            context.boundnode = None
        except (NotFoundError, InferenceError):
            context.boundnode = None
        except AttributeError:
            # XXX method / function
            context.boundnode = None
nodes.Getattr._infer = path_wrapper(raise_if_nothing_infered(infer_getattr))
nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # # won't work with a path wrapper
|
||||
|
||||
|
||||
def infer_global(self, context=None):
    # a `global name` statement: resolve the name in the module scope
    if context.lookupname is None:
        raise InferenceError()
    try:
        return _infer_stmts(self.root().getattr(context.lookupname), context)
    except NotFoundError:
        raise InferenceError()
nodes.Global._infer = path_wrapper(infer_global)
|
||||
|
||||
|
||||
def infer_subscript(self, context=None):
    """infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]"""
    # only the first inferred value/index is considered
    value = next(self.value.infer(context))
    if value is YES:
        yield YES
        return

    index = next(self.slice.infer(context))
    if index is YES:
        yield YES
        return

    # only constant indices are supported (no slices, no computed keys)
    if isinstance(index, nodes.Const):
        try:
            assigned = value.getitem(index.value, context)
        except AttributeError:
            raise InferenceError()
        except (IndexError, TypeError):
            yield YES
            return

        # Prevent inferring if the infered subscript
        # is the same as the original subscripted object.
        if self is assigned:
            yield YES
            return
        for infered in assigned.infer(context):
            yield infered
    else:
        raise InferenceError()
nodes.Subscript._infer = path_wrapper(infer_subscript)
nodes.Subscript.infer_lhs = raise_if_nothing_infered(infer_subscript)
|
||||
|
||||
def infer_unaryop(self, context=None):
    # infer a unary operation by delegating to the operand's own
    # infer_unary_op, falling back on a protocol-method lookup
    for operand in self.operand.infer(context):
        try:
            yield operand.infer_unary_op(self.op)
        except TypeError:
            continue
        except AttributeError:
            meth = UNARY_OP_METHOD[self.op]
            if meth is None:
                yield YES
            else:
                try:
                    # XXX just suppose if the type implement meth, returned type
                    # will be the same
                    operand.getattr(meth)
                    yield operand
                except GeneratorExit:
                    raise
                except:
                    # NOTE(review): deliberately broad best-effort fallback;
                    # any lookup failure degrades to YES rather than aborting
                    yield YES
nodes.UnaryOp._infer = path_wrapper(infer_unaryop)
|
||||
|
||||
def _infer_binop(operator, operand1, operand2, context, failures=None):
    # yield possible values for `operand1 <operator> operand2`; when
    # *failures* is given, operands that can't handle the operation are
    # collected there (for the caller to retry the reflected form)
    # instead of yielding YES
    if operand1 is YES:
        yield operand1
        return
    try:
        for valnode in operand1.infer_binary_op(operator, operand2, context):
            yield valnode
    except AttributeError:
        try:
            # XXX just suppose if the type implement meth, returned type
            # will be the same
            operand1.getattr(BIN_OP_METHOD[operator])
            yield operand1
        except:
            # NOTE(review): deliberately broad; see failures handling above
            if failures is None:
                yield YES
            else:
                failures.append(operand1)
|
||||
|
||||
def infer_binop(self, context=None):
    # try left-hand side first; operands that failed get retried with
    # the right-hand side driving (reflected operation)
    failures = []
    for lhs in self.left.infer(context):
        for val in _infer_binop(self.op, lhs, self.right, context, failures):
            yield val
    for lhs in failures:
        for rhs in self.right.infer(context):
            for val in _infer_binop(self.op, rhs, lhs, context):
                yield val
nodes.BinOp._infer = path_wrapper(infer_binop)
|
||||
|
||||
|
||||
def infer_arguments(self, context=None):
    # infer the value of the formal argument named by the lookup context
    name = context.lookupname
    if name is None:
        raise InferenceError()
    return _arguments_infer_argname(self, name, context)
nodes.Arguments._infer = infer_arguments
|
||||
|
||||
|
||||
def infer_ass(self, context=None):
    """infer a AssName/AssAttr: need to inspect the RHS part of the
    assign node
    """
    stmt = self.statement()
    if isinstance(stmt, nodes.AugAssign):
        # augmented assignment: the statement itself knows how to infer
        return stmt.infer(context)
    stmts = list(self.assigned_stmts(context=context))
    return _infer_stmts(stmts, context)
nodes.AssName._infer = path_wrapper(infer_ass)
nodes.AssAttr._infer = path_wrapper(infer_ass)
|
||||
|
||||
def infer_augassign(self, context=None):
    # same two-pass strategy as infer_binop, applied to `target op= value`
    failures = []
    for lhs in self.target.infer_lhs(context):
        for val in _infer_binop(self.op, lhs, self.value, context, failures):
            yield val
    for lhs in failures:
        for rhs in self.value.infer(context):
            for val in _infer_binop(self.op, rhs, lhs, context):
                yield val
nodes.AugAssign._infer = path_wrapper(infer_augassign)
|
||||
|
||||
|
||||
# no infer method on DelName and DelAttr (expected InferenceError)
|
||||
|
||||
|
||||
def infer_empty_node(self, context=None):
    # EmptyNode wraps a runtime object (or nothing): delegate to the
    # manager's introspection-based inference when an object is attached
    if not self.has_underlying_object():
        yield YES
    else:
        try:
            for infered in MANAGER.infer_ast_from_something(self.object,
                                                            context=context):
                yield infered
        except AstroidError:
            yield YES
nodes.EmptyNode._infer = path_wrapper(infer_empty_node)
|
||||
|
||||
|
||||
def infer_index(self, context=None):
    # an Index node is transparent: infer its wrapped value
    return self.value.infer(context)
nodes.Index._infer = infer_index
|
||||
273
plugins/bundle/python-mode/pymode/libs/astroid/inspector.py
Normal file
273
plugins/bundle/python-mode/pymode/libs/astroid/inspector.py
Normal file
|
|
@ -0,0 +1,273 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""visitor doing some postprocessing on the astroid tree.
|
||||
Try to resolve definitions (namespace) dictionary, relationship...
|
||||
|
||||
This module has been imported from pyreverse
|
||||
"""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
from os.path import dirname
|
||||
|
||||
import astroid
|
||||
from astroid.exceptions import InferenceError
|
||||
from astroid.utils import LocalsVisitor
|
||||
from astroid.modutils import get_module_part, is_relative, is_standard_module
|
||||
|
||||
class IdGeneratorMixIn(object):
    """Mixin that hands out monotonically increasing integer uids."""

    def __init__(self, start_value=0):
        self.id_count = start_value

    def init_counter(self, start_value=0):
        """Reset the uid counter to *start_value*."""
        self.id_count = start_value

    def generate_id(self):
        """Return a fresh integer identifier (one past the previous)."""
        next_id = self.id_count + 1
        self.id_count = next_id
        return next_id
|
||||
|
||||
|
||||
class Linker(IdGeneratorMixIn, LocalsVisitor):
    """
    walk on the project tree and resolve relationships.

    According to options the following attributes may be added to visited nodes:

    * uid,
      a unique identifier for the node (on astroid.Project, astroid.Module,
      astroid.Class and astroid.locals_type). Only if the linker has been instantiated
      with tag=True parameter (False by default).

    * Function
      a mapping from locals names to their bounded value, which may be a
      constant like a string or an integer, or an astroid node (on astroid.Module,
      astroid.Class and astroid.Function).

    * instance_attrs_type
      as locals_type but for klass member attributes (only on astroid.Class)

    * implements,
      list of implemented interface _objects_ (only on astroid.Class nodes)
    """

    def __init__(self, project, inherited_interfaces=0, tag=False):
        IdGeneratorMixIn.__init__(self)
        LocalsVisitor.__init__(self)
        # take inherited interface in consideration or not
        self.inherited_interfaces = inherited_interfaces
        # tag nodes or not
        self.tag = tag
        # visited project
        self.project = project


    def visit_project(self, node):
        """visit an astroid.Project node

        * optionally tag the node with a unique id
        """
        if self.tag:
            node.uid = self.generate_id()
        for module in node.modules:
            self.visit(module)

    def visit_package(self, node):
        """visit an astroid.Package node

        * optionally tag the node with a unique id
        """
        if self.tag:
            node.uid = self.generate_id()
        for subelmt in node.values():
            self.visit(subelmt)

    def visit_module(self, node):
        """visit an astroid.Module node

        * set the locals_type mapping
        * set the depends mapping
        * optionally tag the node with a unique id
        """
        # locals_type doubles as a "already visited" marker
        if hasattr(node, 'locals_type'):
            return
        node.locals_type = {}
        node.depends = []
        if self.tag:
            node.uid = self.generate_id()

    def visit_class(self, node):
        """visit an astroid.Class node

        * set the locals_type and instance_attrs_type mappings
        * set the implements list and build it
        * optionally tag the node with a unique id
        """
        if hasattr(node, 'locals_type'):
            return
        node.locals_type = {}
        if self.tag:
            node.uid = self.generate_id()
        # resolve ancestors
        for baseobj in node.ancestors(recurs=False):
            specializations = getattr(baseobj, 'specializations', [])
            specializations.append(node)
            baseobj.specializations = specializations
        # resolve instance attributes
        node.instance_attrs_type = {}
        for assattrs in node.instance_attrs.values():
            for assattr in assattrs:
                self.handle_assattr_type(assattr, node)
        # resolve implemented interface
        try:
            node.implements = list(node.interfaces(self.inherited_interfaces))
        except InferenceError:
            node.implements = ()

    def visit_function(self, node):
        """visit an astroid.Function node

        * set the locals_type mapping
        * optionally tag the node with a unique id
        """
        if hasattr(node, 'locals_type'):
            return
        node.locals_type = {}
        if self.tag:
            node.uid = self.generate_id()

    # aliases kept for callers using the link_* naming
    link_project = visit_project
    link_module = visit_module
    link_class = visit_class
    link_function = visit_function

    def visit_assname(self, node):
        """visit an astroid.AssName node

        handle locals_type
        """
        # avoid double parsing done by different Linkers.visit
        # running over the same project:
        if hasattr(node, '_handled'):
            return
        node._handled = True
        if node.name in node.frame():
            frame = node.frame()
        else:
            # the name has been defined as 'global' in the frame and belongs
            # there. Btw the frame is not yet visited as the name is in the
            # root locals; the frame hence has no locals_type attribute
            frame = node.root()
        try:
            values = node.infered()
            try:
                already_infered = frame.locals_type[node.name]
                for valnode in values:
                    if not valnode in already_infered:
                        already_infered.append(valnode)
            except KeyError:
                frame.locals_type[node.name] = values
        except astroid.InferenceError:
            pass

    def handle_assattr_type(self, node, parent):
        """handle an astroid.AssAttr node

        handle instance_attrs_type
        """
        try:
            values = list(node.infer())
            try:
                already_infered = parent.instance_attrs_type[node.attrname]
                for valnode in values:
                    if not valnode in already_infered:
                        already_infered.append(valnode)
            except KeyError:
                parent.instance_attrs_type[node.attrname] = values
        except astroid.InferenceError:
            pass

    def visit_import(self, node):
        """visit an astroid.Import node

        resolve module dependencies
        """
        context_file = node.root().file
        for name in node.names:
            relative = is_relative(name[0], context_file)
            self._imported_module(node, name[0], relative)


    def visit_from(self, node):
        """visit an astroid.From node

        resolve module dependencies
        """
        basename = node.modname
        context_file = node.root().file
        if context_file is not None:
            relative = is_relative(basename, context_file)
        else:
            relative = False
        for name in node.names:
            if name[0] == '*':
                continue
            # analyze dependencies
            fullname = '%s.%s' % (basename, name[0])
            if fullname.find('.') > -1:
                try:
                    # XXX: don't use get_module_part, missing package precedence
                    fullname = get_module_part(fullname, context_file)
                except ImportError:
                    continue
            if fullname != basename:
                self._imported_module(node, fullname, relative)


    def compute_module(self, context_name, mod_path):
        """return true if the module should be added to dependencies"""
        package_dir = dirname(self.project.path)
        if context_name == mod_path:
            # a module does not depend on itself
            return 0
        elif is_standard_module(mod_path, (package_dir,)):
            return 1
        return 0

    # protected methods ########################################################

    def _imported_module(self, node, mod_path, relative):
        """notify an imported module, used to analyze dependencies
        """
        module = node.root()
        context_name = module.name
        if relative:
            # resolve the relative import against the importing package
            mod_path = '%s.%s' % ('.'.join(context_name.split('.')[:-1]),
                                  mod_path)
        if self.compute_module(context_name, mod_path):
            # handle dependencies
            if not hasattr(module, 'depends'):
                module.depends = []
            mod_paths = module.depends
            if not mod_path in mod_paths:
                mod_paths.append(mod_path)
|
||||
391
plugins/bundle/python-mode/pymode/libs/astroid/manager.py
Normal file
391
plugins/bundle/python-mode/pymode/libs/astroid/manager.py
Normal file
|
|
@ -0,0 +1,391 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""astroid manager: avoid multiple astroid build of a same module when
|
||||
possible by providing a class responsible to get astroid representation
|
||||
from various source and using a cache of built modules)
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import collections
|
||||
import imp
|
||||
import os
|
||||
from os.path import dirname, join, isdir, exists
|
||||
from warnings import warn
|
||||
import zipimport
|
||||
|
||||
from logilab.common.configuration import OptionsProviderMixIn
|
||||
|
||||
from astroid.exceptions import AstroidBuildingException
|
||||
from astroid import modutils
|
||||
|
||||
|
||||
def astroid_wrapper(func, modname):
    """Wrapper to give to AstroidManager.project_from_files."""
    print('parsing %s...' % modname)
    try:
        return func(modname)
    except AstroidBuildingException as exc:
        # Build failures are expected here: report and keep going.
        print(exc)
    except Exception:
        # Unexpected failure: dump the traceback but do not abort the run.
        import traceback
        traceback.print_exc()
|
||||
|
||||
def _silent_no_wrap(func, modname):
|
||||
"""silent wrapper that doesn't do anything; can be used for tests"""
|
||||
return func(modname)
|
||||
|
||||
def safe_repr(obj):
    """Return repr(obj), or '???' when computing the repr itself fails.

    A broken __repr__ must not take down the caller, but the original
    bare ``except:`` also swallowed KeyboardInterrupt/SystemExit; catch
    only Exception so genuine exit requests propagate.
    """
    try:
        return repr(obj)
    except Exception:
        return '???'
|
||||
|
||||
|
||||
|
||||
class AstroidManager(OptionsProviderMixIn):
    """the astroid manager, responsible to build astroid from files
    or modules.

    Use the Borg pattern.
    """

    name = 'astroid loader'
    options = (("ignore",
                {'type' : "csv", 'metavar' : "<file>",
                 'dest' : "black_list", "default" : ('CVS',),
                 'help' : "add <file> (may be a directory) to the black list\
. It should be a base name, not a path. You may set this option multiple times\
."}),
               ("project",
                {'default': "No Name", 'type' : 'string', 'short': 'p',
                 'metavar' : '<project name>',
                 'help' : 'set the project name.'}),
               )
    # Borg pattern: every instance shares its __dict__ through this
    # class-level dictionary.
    brain = {}

    def __init__(self):
        self.__dict__ = AstroidManager.brain
        if not self.__dict__:
            # First instantiation: initialize the shared state once.
            OptionsProviderMixIn.__init__(self)
            self.load_defaults()
            # NOTE: cache entries are added by the [re]builder
            self.astroid_cache = {}
            self._mod_file_cache = {}
            self.transforms = collections.defaultdict(list)
            self._failed_import_hooks = []
            self.always_load_extensions = False
            self.optimize_ast = False
            self.extension_package_whitelist = set()

    def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
        """given a module name, return the astroid object

        If a source file exists for *filepath*, build from source; when
        *fallback* is true and a module name is known, fall back to
        building from the module name.  Raises AstroidBuildingException
        when no strategy applies.
        """
        try:
            filepath = modutils.get_source_file(filepath, include_no_ext=True)
            source = True
        except modutils.NoSourceFile:
            pass
        if modname is None:
            try:
                modname = '.'.join(modutils.modpath_from_file(filepath))
            except ImportError:
                # fall back to using the file path as the module name
                modname = filepath
        if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath:
            return self.astroid_cache[modname]
        if source:
            # imported here to avoid a circular import at module level
            from astroid.builder import AstroidBuilder
            return AstroidBuilder(self).file_build(filepath, modname)
        elif fallback and modname:
            return self.ast_from_module_name(modname)
        raise AstroidBuildingException('unable to get astroid for file %s' %
                                       filepath)

    def _build_stub_module(self, modname):
        """Return an empty astroid module used as a stand-in for *modname*."""
        from astroid.builder import AstroidBuilder
        return AstroidBuilder(self).string_build('', modname)

    def _can_load_extension(self, modname):
        """Return True if importing the C extension *modname* is allowed."""
        if self.always_load_extensions:
            return True
        if modutils.is_standard_module(modname):
            return True
        # allowed when the module or any of its parent packages is
        # whitelisted
        parts = modname.split('.')
        return any(
            '.'.join(parts[:x]) in self.extension_package_whitelist
            for x in range(1, len(parts) + 1))

    def ast_from_module_name(self, modname, context_file=None):
        """given a module name, return the astroid object"""
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        if modname == '__main__':
            # there is no importable source for the running script
            return self._build_stub_module(modname)
        # chdir so relative imports resolve against the context file
        old_cwd = os.getcwd()
        if context_file:
            os.chdir(dirname(context_file))
        try:
            filepath, mp_type = self.file_from_module_name(modname, context_file)
            if mp_type == modutils.PY_ZIPMODULE:
                module = self.zip_import_data(filepath)
                if module is not None:
                    return module
            elif mp_type in (imp.C_BUILTIN, imp.C_EXTENSION):
                if mp_type == imp.C_EXTENSION and not self._can_load_extension(modname):
                    return self._build_stub_module(modname)
                try:
                    module = modutils.load_module_from_name(modname)
                except Exception as ex:
                    msg = 'Unable to load module %s (%s)' % (modname, ex)
                    raise AstroidBuildingException(msg)
                return self.ast_from_module(module, modname)
            elif mp_type == imp.PY_COMPILED:
                raise AstroidBuildingException("Unable to load compiled module %s" % (modname,))
            if filepath is None:
                raise AstroidBuildingException("Unable to load module %s" % (modname,))
            return self.ast_from_file(filepath, modname, fallback=False)
        except AstroidBuildingException as e:
            # give registered failed-import hooks a chance to resolve it
            for hook in self._failed_import_hooks:
                try:
                    return hook(modname)
                except AstroidBuildingException:
                    pass
            raise e
        finally:
            os.chdir(old_cwd)

    def zip_import_data(self, filepath):
        """Build an astroid module from a source file inside a zip/egg,
        or return None if *filepath* does not point into one.
        """
        if zipimport is None:
            return None
        from astroid.builder import AstroidBuilder
        builder = AstroidBuilder(self)
        for ext in ('.zip', '.egg'):
            try:
                # split "<archive>.zip/<resource path>" at the archive boundary
                eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
            except ValueError:
                continue
            try:
                importer = zipimport.zipimporter(eggpath + ext)
                zmodname = resource.replace(os.path.sep, '.')
                if importer.is_package(resource):
                    zmodname = zmodname + '.__init__'
                module = builder.string_build(importer.get_source(resource),
                                              zmodname, filepath)
                return module
            except:
                # best effort: try the next extension / archive candidate
                continue
        return None

    def file_from_module_name(self, modname, contextfile):
        """Return (filepath, module type) for *modname*, caching results.

        Failed lookups are cached as AstroidBuildingException instances
        and re-raised on every subsequent call.
        """
        try:
            value = self._mod_file_cache[(modname, contextfile)]
        except KeyError:
            try:
                value = modutils.file_info_from_modpath(
                    modname.split('.'), context_file=contextfile)
            except ImportError as ex:
                msg = 'Unable to load module %s (%s)' % (modname, ex)
                value = AstroidBuildingException(msg)
            self._mod_file_cache[(modname, contextfile)] = value
        if isinstance(value, AstroidBuildingException):
            raise value
        return value

    def ast_from_module(self, module, modname=None):
        """given an imported module, return the astroid object"""
        modname = modname or module.__name__
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        try:
            # some builtin modules don't have __file__ attribute
            filepath = module.__file__
            if modutils.is_python_source(filepath):
                return self.ast_from_file(filepath, modname)
        except AttributeError:
            pass
        # no usable source: introspect the living module object
        from astroid.builder import AstroidBuilder
        return AstroidBuilder(self).module_build(module, modname)

    def ast_from_class(self, klass, modname=None):
        """get astroid for the given class"""
        if modname is None:
            try:
                modname = klass.__module__
            except AttributeError:
                raise AstroidBuildingException(
                    'Unable to get module for class %s' % safe_repr(klass))
        modastroid = self.ast_from_module_name(modname)
        return modastroid.getattr(klass.__name__)[0] # XXX

    def infer_ast_from_something(self, obj, context=None):
        """infer astroid for the given class

        Generator yielding inferred nodes; instances yield instantiated
        class nodes.
        """
        if hasattr(obj, '__class__') and not isinstance(obj, type):
            klass = obj.__class__
        else:
            klass = obj
        try:
            modname = klass.__module__
        except AttributeError:
            raise AstroidBuildingException(
                'Unable to get module for %s' % safe_repr(klass))
        except Exception as ex:
            raise AstroidBuildingException(
                'Unexpected error while retrieving module for %s: %s'
                % (safe_repr(klass), ex))
        try:
            name = klass.__name__
        except AttributeError:
            raise AstroidBuildingException(
                'Unable to get name for %s' % safe_repr(klass))
        except Exception as ex:
            raise AstroidBuildingException(
                'Unexpected error while retrieving name for %s: %s'
                % (safe_repr(klass), ex))
        # take care, on living object __module__ is regularly wrong :(
        modastroid = self.ast_from_module_name(modname)
        if klass is obj:
            for infered in modastroid.igetattr(name, context):
                yield infered
        else:
            for infered in modastroid.igetattr(name, context):
                yield infered.instanciate_class()

    def project_from_files(self, files, func_wrapper=astroid_wrapper,
                           project_name=None, black_list=None):
        """return a Project from a list of files or modules"""
        # build the project representation
        project_name = project_name or self.config.project
        black_list = black_list or self.config.black_list
        project = Project(project_name)
        for something in files:
            if not exists(something):
                # not a path: treat it as a dotted module name
                fpath = modutils.file_from_modpath(something.split('.'))
            elif isdir(something):
                fpath = join(something, '__init__.py')
            else:
                fpath = something
            astroid = func_wrapper(self.ast_from_file, fpath)
            if astroid is None:
                continue
            # XXX why is first file defining the project.path ?
            project.path = project.path or astroid.file
            project.add_module(astroid)
            base_name = astroid.name
            # recurse in package except if __init__ was explicitly given
            if astroid.package and something.find('__init__') == -1:
                # recurse on others packages / modules if this is a package
                for fpath in modutils.get_module_files(dirname(astroid.file),
                                                      black_list):
                    astroid = func_wrapper(self.ast_from_file, fpath)
                    if astroid is None or astroid.name == base_name:
                        continue
                    project.add_module(astroid)
        return project

    def register_transform(self, node_class, transform, predicate=None):
        """Register `transform(node)` function to be applied on the given
        Astroid's `node_class` if `predicate` is None or returns true
        when called with the node as argument.

        The transform function may return a value which is then used to
        substitute the original node in the tree.
        """
        self.transforms[node_class].append((transform, predicate))

    def unregister_transform(self, node_class, transform, predicate=None):
        """Unregister the given transform."""
        self.transforms[node_class].remove((transform, predicate))

    def register_failed_import_hook(self, hook):
        """Registers a hook to resolve imports that cannot be found otherwise.

        `hook` must be a function that accepts a single argument `modname` which
        contains the name of the module or package that could not be imported.
        If `hook` can resolve the import, must return a node of type `astroid.Module`,
        otherwise, it must raise `AstroidBuildingException`.
        """
        self._failed_import_hooks.append(hook)

    def transform(self, node):
        """Call matching transforms for the given node if any and return the
        transformed node.
        """
        cls = node.__class__
        if cls not in self.transforms:
            # no transform registered for this class of node
            return node

        transforms = self.transforms[cls]
        orig_node = node # copy the reference
        for transform_func, predicate in transforms:
            if predicate is None or predicate(node):
                ret = transform_func(node)
                # if the transformation function returns something, it's
                # expected to be a replacement for the node
                if ret is not None:
                    if node is not orig_node:
                        # node has already be modified by some previous
                        # transformation, warn about it
                        warn('node %s substituted multiple times' % node)
                    node = ret
        return node

    def cache_module(self, module):
        """Cache a module if no module with the same name is known yet."""
        self.astroid_cache.setdefault(module.name, module)

    def clear_cache(self, astroid_builtin=None):
        """Empty the astroid cache and re-run the builtins bootstrap."""
        # XXX clear transforms
        self.astroid_cache.clear()
        # force bootstrap again, else we may ends up with cache inconsistency
        # between the manager and CONST_PROXY, making
        # unittest_lookup.LookupTC.test_builtin_lookup fail depending on the
        # test order
        import astroid.raw_building
        astroid.raw_building._astroid_bootstrapping(
            astroid_builtin=astroid_builtin)
|
||||
|
||||
|
||||
class Project(object):
    """A project handles a set of modules / packages.

    Behaves as a read-only mapping from module name to module node,
    while also keeping the modules in insertion order.
    """

    def __init__(self, name=''):
        self.name = name      # project name
        self.path = None      # path of the first module added
        self.modules = []     # module nodes, in insertion order
        self.locals = {}      # module name -> module node

    # Mapping protocol, delegated to self.locals.  Defined as real methods
    # (instead of the former instance-attribute aliases) because special
    # methods are looked up on the type, not the instance: with the old
    # aliases ``project[name]`` and ``iter(project)`` raised TypeError.
    # Explicit calls such as ``project.keys()`` keep working unchanged.
    def __getitem__(self, name):
        return self.locals[name]

    def __iter__(self):
        return iter(self.locals)

    def values(self):
        return self.locals.values()

    def keys(self):
        return self.locals.keys()

    def items(self):
        return self.locals.items()

    def add_module(self, node):
        """Register *node* under its module name."""
        self.locals[node.name] = node
        self.modules.append(node)

    def get_module(self, name):
        """Return the module registered under *name* (KeyError if absent)."""
        return self.locals[name]

    def get_children(self):
        """Return all module nodes, in the order they were added."""
        return self.modules

    def __repr__(self):
        return '<Project %r at %s (%s modules)>' % (self.name, id(self),
                                                    len(self.modules))
|
||||
|
||||
|
||||
124
plugins/bundle/python-mode/pymode/libs/astroid/mixins.py
Normal file
124
plugins/bundle/python-mode/pymode/libs/astroid/mixins.py
Normal file
|
|
@ -0,0 +1,124 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""This module contains some mixins for the different nodes.
|
||||
"""
|
||||
|
||||
from logilab.common.decorators import cachedproperty
|
||||
|
||||
from astroid.exceptions import (AstroidBuildingException, InferenceError,
|
||||
NotFoundError)
|
||||
|
||||
|
||||
class BlockRangeMixIn(object):
    """Mixin overriding the block line-number range of compound statements."""

    @cachedproperty
    def blockstart_tolineno(self):
        # the statement "header" ends on its own starting line
        return self.lineno

    def _elsed_block_range(self, lineno, orelse, last=None):
        """Compute the block range for try/finally, for, if and while
        statements that may carry an "else" clause.
        """
        if lineno == self.fromlineno:
            return lineno, lineno
        if not orelse:
            return lineno, last or self.tolineno
        else_start = orelse[0].fromlineno
        if lineno < else_start:
            # inside the main body: stop just before the else clause
            return lineno, else_start - 1
        return lineno, orelse[-1].tolineno
|
||||
|
||||
|
||||
class FilterStmtsMixin(object):
    """Mixin providing statement filtering and assignment-type lookup."""

    def _get_filtered_stmts(self, _, node, _stmts, mystmt):
        """Used by _filter_stmts to pick statements and trigger the break."""
        if self.statement() is not mystmt:
            return _stmts, False
        # The original node's statement is the assignment itself: keep only
        # the current node (generator expression, list comprehension).
        return [node], True

    def ass_type(self):
        """A node of this kind is its own assignment type."""
        return self
|
||||
|
||||
|
||||
class AssignTypeMixin(object):
    """Mixin for nodes that are their own assignment type."""

    def ass_type(self):
        """A node of this kind is its own assignment type."""
        return self

    def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt):
        """Used by filter_stmts to pick statements and trigger the break."""
        if self is mystmt:
            return _stmts, True
        if self.statement() is not mystmt:
            return _stmts, False
        # The original node's statement is the assignment itself: keep only
        # the current node (generator expression, list comprehension).
        return [node], True
|
||||
|
||||
|
||||
class ParentAssignTypeMixin(AssignTypeMixin):
    """Mixin for nodes whose assignment type is carried by their parent."""

    def ass_type(self):
        """Delegate the assignment-type lookup to the parent node."""
        return self.parent.ass_type()
|
||||
|
||||
|
||||
class FromImportMixIn(FilterStmtsMixin):
    """MixIn for From and Import Nodes"""

    def _infer_name(self, frame, name):
        """Imported names infer to themselves (the bound name)."""
        return name

    def do_import_module(self, modname=None):
        """return the ast for a module whose name is <modname> imported by <self>

        Defaults to this node's own modname.  Raises InferenceError when
        the module cannot be built or contains a syntax error.
        """
        # handle special case where we are on a package node importing a module
        # using the same name as the package, which may end in an infinite loop
        # on relative imports
        # XXX: no more needed ?
        mymodule = self.root()
        # Import nodes have no 'level' attribute; only From nodes do
        level = getattr(self, 'level', None) # Import as no level
        if modname is None:
            modname = self.modname
        # XXX we should investigate deeper if we really want to check
        # importing itself: modname and mymodule.name be relative or absolute
        if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
            # FIXME: we used to raise InferenceError here, but why ?
            return mymodule
        try:
            return mymodule.import_module(modname, level=level)
        except AstroidBuildingException:
            raise InferenceError(modname)
        except SyntaxError as ex:
            raise InferenceError(str(ex))

    def real_name(self, asname):
        """get name from 'as' name

        Given the local binding name *asname*, return the original
        imported name; raises NotFoundError when no entry matches.
        """
        for name, _asname in self.names:
            if name == '*':
                # a star import binds any name
                return asname
            if not _asname:
                # without an alias, "import a.b" binds the top-level "a"
                name = name.split('.', 1)[0]
                _asname = name
            if asname == _asname:
                return name
        raise NotFoundError(asname)
|
||||
|
||||
670
plugins/bundle/python-mode/pymode/libs/astroid/modutils.py
Normal file
670
plugins/bundle/python-mode/pymode/libs/astroid/modutils.py
Normal file
|
|
@ -0,0 +1,670 @@
|
|||
# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Python modules manipulation utility functions.
|
||||
|
||||
:type PY_SOURCE_EXTS: tuple(str)
|
||||
:var PY_SOURCE_EXTS: list of possible python source file extension
|
||||
|
||||
:type STD_LIB_DIRS: set of str
|
||||
:var STD_LIB_DIRS: directories where standard modules are located
|
||||
|
||||
:type BUILTIN_MODULES: dict
|
||||
:var BUILTIN_MODULES: dictionary with builtin module names has key
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import imp
|
||||
import os
|
||||
import sys
|
||||
from distutils.sysconfig import get_python_lib
|
||||
from distutils.errors import DistutilsPlatformError
|
||||
import zipimport
|
||||
|
||||
try:
|
||||
import pkg_resources
|
||||
except ImportError:
|
||||
pkg_resources = None
|
||||
|
||||
from logilab.common import _handle_blacklist
|
||||
|
||||
# Sentinel module type for modules found inside zip/egg archives.
PY_ZIPMODULE = object()

# Recognized source / compiled extensions differ per platform.
if sys.platform.startswith('win'):
    PY_SOURCE_EXTS = ('py', 'pyw')
    PY_COMPILED_EXTS = ('dll', 'pyd')
else:
    PY_SOURCE_EXTS = ('py',)
    PY_COMPILED_EXTS = ('so',)

# Notes about STD_LIB_DIRS
# Consider arch-specific installation for STD_LIB_DIRS definition
# :mod:`distutils.sysconfig` contains to much hardcoded values to rely on
#
# :see: `Problems with /usr/lib64 builds <http://bugs.python.org/issue1294959>`_
# :see: `FHS <http://www.pathname.com/fhs/pub/fhs-2.3.html#LIBLTQUALGTALTERNATEFORMATESSENTIAL>`_
try:
    # The explicit sys.prefix is to work around a patch in virtualenv that
    # replaces the 'real' sys.prefix (i.e. the location of the binary)
    # with the prefix from which the virtualenv was created. This throws
    # off the detection logic for standard library modules, thus the
    # workaround.
    STD_LIB_DIRS = set([
        get_python_lib(standard_lib=True, prefix=sys.prefix),
        # Take care of installations where exec_prefix != prefix.
        get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
        get_python_lib(standard_lib=True)])
    if os.name == 'nt':
        STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls'))
        try:
            # real_prefix is defined when running inside virtualenv.
            STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls'))
        except AttributeError:
            pass
# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
# non-valid path, see https://bugs.pypy.org/issue1164
except DistutilsPlatformError:
    STD_LIB_DIRS = set()

# Directory where third-party (site-packages) libraries are installed.
EXT_LIB_DIR = get_python_lib()

# Mapping of builtin module name -> 1, used for fast membership tests.
BUILTIN_MODULES = dict(zip(sys.builtin_module_names,
                           [1]*len(sys.builtin_module_names)))
|
||||
|
||||
|
||||
class NoSourceFile(Exception):
    """Raised when no python source file can be found for a
    precompiled file.
    """
|
||||
|
||||
def _normalize_path(path):
|
||||
return os.path.normcase(os.path.abspath(path))
|
||||
|
||||
|
||||
_NORM_PATH_CACHE = {}
|
||||
|
||||
def _cache_normalize_path(path):
|
||||
"""abspath with caching"""
|
||||
# _module_file calls abspath on every path in sys.path every time it's
|
||||
# called; on a larger codebase this easily adds up to half a second just
|
||||
# assembling path components. This cache alleviates that.
|
||||
try:
|
||||
return _NORM_PATH_CACHE[path]
|
||||
except KeyError:
|
||||
if not path: # don't cache result for ''
|
||||
return _normalize_path(path)
|
||||
result = _NORM_PATH_CACHE[path] = _normalize_path(path)
|
||||
return result
|
||||
|
||||
def load_module_from_name(dotted_name, path=None, use_sys=1):
    """Load a Python module from its dotted name.

    :type dotted_name: str
    :param dotted_name: python name of a module or package

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type use_sys: bool
    :param use_sys:
      boolean indicating whether the sys.modules dictionary should be
      used or not

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module
    """
    parts = dotted_name.split('.')
    return load_module_from_modpath(parts, path, use_sys)
|
||||
|
||||
|
||||
def load_module_from_modpath(parts, path=None, use_sys=1):
    """Load a python module from its splitted name.

    :type parts: list(str) or tuple(str)
    :param parts:
      python name of a module or package splitted on '.'

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type use_sys: bool
    :param use_sys:
      boolean indicating whether the sys.modules dictionary should be used or not

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module
    """
    if use_sys:
        try:
            return sys.modules['.'.join(parts)]
        except KeyError:
            pass
    modpath = []
    prevmodule = None
    for part in parts:
        modpath.append(part)
        curname = '.'.join(modpath)
        module = None
        if len(modpath) != len(parts):
            # even with use_sys=False, should try to get outer packages from sys.modules
            module = sys.modules.get(curname)
        elif use_sys:
            # because it may have been indirectly loaded through a parent
            module = sys.modules.get(curname)
        if module is None:
            mp_file, mp_filename, mp_desc = imp.find_module(part, path)
            try:
                module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
            finally:
                # mp_file must be closed even when load_module raises,
                # else the file descriptor leaks
                if mp_file:
                    mp_file.close()
        if prevmodule:
            setattr(prevmodule, part, module)
        _file = getattr(module, '__file__', '')
        if not _file and len(modpath) != len(parts):
            raise ImportError('no module in %s' % '.'.join(parts[len(modpath):]))
        # continue the search inside the package just loaded
        path = [os.path.dirname(_file)]
        prevmodule = module
    return module
|
||||
|
||||
|
||||
def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None):
    """Load a Python module from it's path.

    :type filepath: str
    :param filepath: path to the python module or package

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type use_sys: bool
    :param use_sys:
      boolean indicating whether the sys.modules dictionary should be
      used or not

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module
    """
    # resolve the file path to a dotted module path, then delegate
    modpath = modpath_from_file(filepath, extrapath)
    return load_module_from_modpath(modpath, path, use_sys)
|
||||
|
||||
|
||||
def _check_init(path, mod_path):
    """Return True when every directory from *path* down through the
    parts of *mod_path* contains an __init__ file (i.e. is a package).
    """
    current = path
    for part in mod_path:
        current = os.path.join(current, part)
        if not _has_init(current):
            return False
    return True
|
||||
|
||||
|
||||
def modpath_from_file(filename, extrapath=None):
    """given a file path return the corresponding splitted module's name
    (i.e name of a module or package splitted on '.')

    :type filename: str
    :param filename: file's path for which we want the module's name

    :type extrapath: dict
    :param extrapath:
      optional extra search path, with path as key and package name for the path
      as value. This is usually useful to handle package splitted in multiple
      directories using __path__ trick.


    :raise ImportError:
      if the corresponding module's name has not been found

    :rtype: list(str)
    :return: the corresponding splitted module's name
    """
    # drop the extension; the module path is derived from directory parts
    base = os.path.splitext(os.path.abspath(filename))[0]
    if extrapath is not None:
        # extra search paths take precedence over sys.path
        for path_ in extrapath:
            path = os.path.abspath(path_)
            # prefix match with platform-normalized case
            if path and os.path.normcase(base[:len(path)]) == os.path.normcase(path):
                submodpath = [pkg for pkg in base[len(path):].split(os.sep)
                              if pkg]
                # every intermediate directory must be a package
                if _check_init(path, submodpath[:-1]):
                    return extrapath[path_].split('.') + submodpath
    for path in sys.path:
        path = _cache_normalize_path(path)
        if path and os.path.normcase(base).startswith(path):
            modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg]
            if _check_init(path, modpath[:-1]):
                return modpath
    raise ImportError('Unable to find module for %s in %s' % (
        filename, ', \n'.join(sys.path)))
|
||||
|
||||
|
||||
def file_from_modpath(modpath, path=None, context_file=None):
    """Shortcut for file_info_from_modpath: return only the file path."""
    file_path, _ = file_info_from_modpath(modpath, path, context_file)
    return file_path
|
||||
|
||||
def file_info_from_modpath(modpath, path=None, context_file=None):
    """given a mod path (i.e. splitted module / package name), return the
    corresponding file, giving priority to source file over precompiled
    file if it exists

    :type modpath: list or tuple
    :param modpath:
      splitted module's name (i.e name of a module or package splitted
      on '.')
      (this means explicit relative imports that start with dots have
      empty strings in this list!)

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type context_file: str or None
    :param context_file:
      context file to consider, necessary if the identifier has been
      introduced using a relative import unresolvable in the actual
      context (i.e. modutils)

    :raise ImportError: if there is no such module in the directory

    :rtype: (str or None, import type)
    :return:
      the path to the module's file or None if it's an integrated
      builtin module such as 'sys'
    """
    # resolve relative imports against the directory of the context file
    context = os.path.dirname(context_file) if context_file is not None else None
    if modpath == ['os', 'path']:
        # FIXME: currently ignoring search_path...
        return os.path.__file__, imp.PY_SOURCE
    if modpath[0] == 'xml':
        # PyXML's _xmlplus package overrides the standard 'xml' package
        try:
            return _file_from_modpath(['_xmlplus'] + modpath[1:], path, context)
        except ImportError:
            pass
    return _file_from_modpath(modpath, path, context)
|
||||
|
||||
|
||||
def get_module_part(dotted_name, context_file=None):
    """given a dotted name return the module part of the name :

    >>> get_module_part('logilab.common.modutils.get_module_part')
    'logilab.common.modutils'

    :type dotted_name: str
    :param dotted_name: full name of the identifier we are interested in

    :type context_file: str or None
    :param context_file:
      context file to consider, necessary if the identifier has been
      introduced using a relative import unresolvable in the actual
      context (i.e. modutils)

    :raise ImportError: if there is no such module in the directory

    :rtype: str or None
    :return:
      the module part of the name or None if we have not been able at
      all to import the given name

    XXX: deprecated, since it doesn't handle package precedence over module
    (see #10066)
    """
    # os.path trick
    if dotted_name.startswith('os.path'):
        return 'os.path'
    parts = dotted_name.split('.')
    if context_file is not None:
        # first check for builtin module which won't be considered latter
        # in that case (path != None)
        if parts[0] in BUILTIN_MODULES:
            if len(parts) > 2:
                # builtin modules have at most one attribute level
                raise ImportError(dotted_name)
            return parts[0]
    # don't use += or insert, we want a new list to be created !
    path = None
    starti = 0
    if parts[0] == '':
        # explicit relative import: leading empty strings come from the
        # leading dots of the original name
        assert context_file is not None, \
            'explicit relative import, but no context_file?'
        path = [] # prevent resolving the import non-relatively
        starti = 1
    while parts[starti] == '': # for all further dots: change context
        starti += 1
        context_file = os.path.dirname(context_file)
    # import each prefix in turn; the longest importable prefix is the
    # module part, the remainder is attribute access
    for i in range(starti, len(parts)):
        try:
            file_from_modpath(parts[starti:i+1], path=path,
                              context_file=context_file)
        except ImportError:
            # only tolerate failure on the last two components (they may
            # be attributes); re-raise for earlier components
            if not i >= max(1, len(parts) - 2):
                raise
            return '.'.join(parts[:i])
    return dotted_name
|
||||
|
||||
|
||||
def get_module_files(src_directory, blacklist):
    """given a package directory return a list of all available python
    module's files in the package and its subpackages

    :type src_directory: str
    :param src_directory:
      path of the directory corresponding to the package

    :type blacklist: list or tuple
    :param blacklist:
      optional list of files or directory to ignore, default to the value of
      `logilab.common.STD_BLACKLIST`

    :rtype: list
    :return:
      the list of all available python module's files in the package and
      its subpackages
    """
    result = []
    for directory, dirnames, filenames in os.walk(src_directory):
        _handle_blacklist(blacklist, dirnames, filenames)
        # only real packages (containing __init__.py) are traversed
        if '__init__.py' not in filenames:
            dirnames[:] = ()
            continue
        result.extend(os.path.join(directory, name)
                      for name in filenames if _is_python_file(name))
    return result
|
||||
|
||||
|
||||
def get_source_file(filename, include_no_ext=False):
    """given a python module's file name return the matching source file
    name (the filename will be returned identically if it's a already an
    absolute path to a python source file...)

    :type filename: str
    :param filename: python module's file name

    :raise NoSourceFile: if no source file exists on the file system

    :rtype: str
    :return: the absolute path of the source file if it exists
    """
    base, orig_ext = os.path.splitext(os.path.abspath(filename))
    # try every known source extension against the basename
    for ext in PY_SOURCE_EXTS:
        candidate = '%s.%s' % (base, ext)
        if os.path.exists(candidate):
            return candidate
    # optionally accept an extension-less file (e.g. scripts)
    if include_no_ext and not orig_ext and os.path.exists(base):
        return base
    raise NoSourceFile(filename)
|
||||
|
||||
|
||||
def is_python_source(filename):
    """Tell whether `filename` looks like a python source file.

    :rtype: bool
    :return: True if the filename is a python source file
    """
    extension = os.path.splitext(filename)[1][1:]
    return extension in PY_SOURCE_EXTS
|
||||
|
||||
|
||||
def is_standard_module(modname, std_path=None):
    """try to guess if a module is a standard python module (by default,
    see `std_path` parameter's description)

    :type modname: str
    :param modname: name of the module we are interested in

    :type std_path: list(str) or tuple(str)
    :param std_path: list of path considered has standard

    :rtype: bool
    :return:
      true if the module:
      - is located on the path listed in one of the directory in `std_path`
      - is a built-in module
    """
    top_level = modname.split('.')[0]
    try:
        filename = file_from_modpath([top_level])
    except ImportError:
        # import failed, i'm probably not so wrong by supposing it's
        # not standard...
        return False
    # modules which are not living in a file are considered standard
    # (sys and __builtin__ for instance)
    if filename is None:
        return True
    filename = _normalize_path(filename)
    # anything under the site-packages / extension dir is not standard
    if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)):
        return False
    if std_path is None:
        std_path = STD_LIB_DIRS
    return any(filename.startswith(_cache_normalize_path(directory))
               for directory in std_path)
|
||||
|
||||
|
||||
|
||||
def is_relative(modname, from_file):
    """return true if the given module name is relative to the given
    file name

    :type modname: str
    :param modname: name of the module we are interested in

    :type from_file: str
    :param from_file:
      path of the module from which modname has been imported

    :rtype: bool
    :return:
      true if the module has been imported relatively to `from_file`
    """
    directory = from_file
    if not os.path.isdir(directory):
        directory = os.path.dirname(directory)
    # a directory already on sys.path is resolved absolutely
    if directory in sys.path:
        return False
    try:
        stream, _, _ = imp.find_module(modname.split('.')[0], [directory])
    except ImportError:
        return False
    # Close the stream to avoid ResourceWarnings.
    if stream:
        stream.close()
    return True
|
||||
|
||||
|
||||
# internal only functions #####################################################
|
||||
|
||||
def _file_from_modpath(modpath, path=None, context=None):
    """given a mod path (i.e. splitted module / package name), return the
    corresponding file

    this function is used internally, see `file_from_modpath`'s
    documentation for more information
    """
    assert len(modpath) > 0
    if context is None:
        mtype, mp_filename = _module_file(modpath, path)
    else:
        # give the context directory a chance first, then fall back to
        # the regular search path
        try:
            mtype, mp_filename = _module_file(modpath, [context])
        except ImportError:
            mtype, mp_filename = _module_file(modpath, path)
    if mtype == imp.PY_COMPILED:
        # prefer the matching source file over the compiled one
        try:
            return get_source_file(mp_filename), imp.PY_SOURCE
        except NoSourceFile:
            return mp_filename, imp.PY_COMPILED
    if mtype == imp.C_BUILTIN:
        # integrated builtin module: it has no file
        return None, imp.C_BUILTIN
    if mtype == imp.PKG_DIRECTORY:
        # a package: report its __init__ file as the module file
        mp_filename = _has_init(mp_filename)
        mtype = imp.PY_SOURCE
    return mp_filename, mtype
|
||||
|
||||
def _search_zip(modpath, pic):
    """Locate `modpath` inside one of the zip importers cached in `pic`
    (sys.path_importer_cache); raise ImportError when not found.
    """
    for filepath, importer in pic.items():
        if importer is None:
            # entry is not a zip archive
            continue
        if not importer.find_module(modpath[0]):
            continue
        if not importer.find_module(os.path.sep.join(modpath)):
            raise ImportError('No module named %s in %s/%s' % (
                '.'.join(modpath[1:]), filepath, modpath))
        return (PY_ZIPMODULE,
                os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath),
                filepath)
    raise ImportError('No module named %s' % '.'.join(modpath))
|
||||
|
||||
|
||||
def _module_file(modpath, path=None):
    """get a module type / file path

    :type modpath: list or tuple
    :param modpath:
      splitted module's name (i.e name of a module or package splitted
      on '.'), with leading empty strings for explicit relative import

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :rtype: tuple(int, str)
    :return: the module type flag and the file path for a module
    """
    # egg support compat
    # prime sys.path_importer_cache with zipimporters so eggs/zips on the
    # search path can be looked up later; if path_importer_cache doesn't
    # exist at all, disable the egg check entirely
    try:
        pic = sys.path_importer_cache
        _path = (path is None and sys.path or path)
        for __path in _path:
            if not __path in pic:
                try:
                    pic[__path] = zipimport.zipimporter(__path)
                except zipimport.ZipImportError:
                    pic[__path] = None
        checkeggs = True
    except AttributeError:
        checkeggs = False
    # pkg_resources support (aka setuptools namespace packages)
    if (pkg_resources is not None
            and modpath[0] in pkg_resources._namespace_packages
            and modpath[0] in sys.modules
            and len(modpath) > 1):
        # setuptools has added into sys.modules a module object with proper
        # __path__, get back information from there
        module = sys.modules[modpath.pop(0)]
        path = module.__path__
    imported = []
    # walk the dotted path one component at a time, narrowing `path` to
    # the package directory found at each step
    while modpath:
        modname = modpath[0]
        # take care to changes in find_module implementation wrt builtin modules
        #
        # Python 2.6.6 (r266:84292, Sep 11 2012, 08:34:23)
        # >>> imp.find_module('posix')
        # (None, 'posix', ('', '', 6))
        #
        # Python 3.3.1 (default, Apr 26 2013, 12:08:46)
        # >>> imp.find_module('posix')
        # (None, None, ('', '', 6))
        try:
            stream, mp_filename, mp_desc = imp.find_module(modname, path)
        except ImportError:
            if checkeggs:
                # last resort: maybe it lives inside a zip/egg archive
                return _search_zip(modpath, pic)[:2]
            raise
        else:
            # Don't forget to close the stream to avoid
            # spurious ResourceWarnings.
            if stream:
                stream.close()

        if checkeggs and mp_filename:
            # if the module was found both on the filesystem and inside an
            # egg that appears earlier on the search path, the egg wins
            fullabspath = [_cache_normalize_path(x) for x in _path]
            try:
                pathindex = fullabspath.index(os.path.dirname(_normalize_path(mp_filename)))
                emtype, emp_filename, zippath = _search_zip(modpath, pic)
                if pathindex > _path.index(zippath):
                    # an egg takes priority
                    return emtype, emp_filename
            except ValueError:
                # XXX not in _path
                pass
            except ImportError:
                pass
            # only the first (top-level) component needs the egg check
            checkeggs = False
        imported.append(modpath.pop(0))
        mtype = mp_desc[2]
        if modpath:
            if mtype != imp.PKG_DIRECTORY:
                # remaining components but current one isn't a package
                raise ImportError('No module %s in %s' % ('.'.join(modpath),
                                                          '.'.join(imported)))
            # XXX guess if package is using pkgutil.extend_path by looking for
            # those keywords in the first four Kbytes
            try:
                with open(os.path.join(mp_filename, '__init__.py'), 'rb') as stream:
                    data = stream.read(4096)
            except IOError:
                path = [mp_filename]
            else:
                if b'pkgutil' in data and b'extend_path' in data:
                    # extend_path is called, search sys.path for module/packages
                    # of this name see pkgutil.extend_path documentation
                    path = [os.path.join(p, *imported) for p in sys.path
                            if os.path.isdir(os.path.join(p, *imported))]
                else:
                    path = [mp_filename]
    return mtype, mp_filename
|
||||
|
||||
def _is_python_file(filename):
|
||||
"""return true if the given filename should be considered as a python file
|
||||
|
||||
.pyc and .pyo are ignored
|
||||
"""
|
||||
for ext in ('.py', '.so', '.pyd', '.pyw'):
|
||||
if filename.endswith(ext):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _has_init(directory):
    """if the given directory has a valid __init__ file, return its path,
    else return None
    """
    candidate = os.path.join(directory, '__init__')
    # source extensions first, then compiled forms
    for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'):
        init_file = candidate + '.' + ext
        if os.path.exists(init_file):
            return init_file
    return None
|
||||
966
plugins/bundle/python-mode/pymode/libs/astroid/node_classes.py
Normal file
966
plugins/bundle/python-mode/pymode/libs/astroid/node_classes.py
Normal file
|
|
@ -0,0 +1,966 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Module for some node classes. More nodes in scoped_nodes.py
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
import six
|
||||
from logilab.common.decorators import cachedproperty
|
||||
|
||||
from astroid.exceptions import NoDefault
|
||||
from astroid.bases import (NodeNG, Statement, Instance, InferenceContext,
|
||||
_infer_stmts, YES, BUILTINS)
|
||||
from astroid.mixins import (BlockRangeMixIn, AssignTypeMixin,
|
||||
ParentAssignTypeMixin, FromImportMixIn)
|
||||
|
||||
PY3K = sys.version_info >= (3, 0)
|
||||
|
||||
|
||||
def unpack_infer(stmt, context=None):
    """recursively generate nodes inferred by the given statement.
    If the inferred value is a list or a tuple, recurse on the elements
    """
    if isinstance(stmt, (List, Tuple)):
        # containers: recurse on each element
        for element in stmt.elts:
            for inferred in unpack_infer(element, context):
                yield inferred
        return
    first = next(stmt.infer(context))
    if first is stmt:
        # final node: return it and stop
        yield first
        return
    # else, infer recursively, except YES object that should be returned as is
    for inferred in stmt.infer(context):
        if inferred is YES:
            yield inferred
        else:
            for sub_inferred in unpack_infer(inferred, context):
                yield sub_inferred
|
||||
|
||||
|
||||
def are_exclusive(stmt1, stmt2, exceptions=None):
    """return true if the two given statements are mutually exclusive

    `exceptions` may be a list of exception names. If specified, discard If
    branches and check one of the statement is in an exception handler catching
    one of the given exceptions.

    algorithm :
     1) index stmt1's parents
     2) climb among stmt2's parents until we find a common parent
     3) if the common parent is a If or TryExcept statement, look if nodes are
        in exclusive branches
    """
    # index stmt1's parents
    stmt1_parents = {}
    children = {}
    node = stmt1.parent
    previous = stmt1
    while node:
        stmt1_parents[node] = 1
        # remember which child we climbed through, to identify branches later
        children[node] = previous
        previous = node
        node = node.parent
    # climb among stmt2's parents until we find a common parent
    node = stmt2.parent
    previous = stmt2
    while node:
        if node in stmt1_parents:
            # if the common parent is a If or TryExcept statement, look if
            # nodes are in exclusive branches
            if isinstance(node, If) and exceptions is None:
                # different If children (test/body/orelse) are exclusive
                if (node.locate_child(previous)[1]
                        is not node.locate_child(children[node])[1]):
                    return True
            elif isinstance(node, TryExcept):
                c2attr, c2node = node.locate_child(previous)
                c1attr, c1node = node.locate_child(children[node])
                if c1node is not c2node:
                    # body vs handlers is exclusive only when the handler
                    # actually catches one of `exceptions`
                    if ((c2attr == 'body' and c1attr == 'handlers' and children[node].catch(exceptions)) or
                            (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or
                            (c2attr == 'handlers' and c1attr == 'orelse') or
                            (c2attr == 'orelse' and c1attr == 'handlers')):
                        return True
                elif c2attr == 'handlers' and c1attr == 'handlers':
                    # two distinct except clauses are mutually exclusive
                    return previous is not children[node]
            return False
        previous = node
        node = node.parent
    # no common parent found
    return False
|
||||
|
||||
|
||||
class LookupMixIn(object):
    """Mixin looking up a name in the right scope
    """

    def lookup(self, name):
        """lookup a variable name

        return the scope node and the list of assignments associated to the
        given name according to the scope where it has been found (locals,
        globals or builtin)

        The lookup is starting from self's scope. If self is not a frame itself
        and the name is found in the inner frame locals, statements will be
        filtered to remove ignorable statements according to self's location
        """
        return self.scope().scope_lookup(self, name)

    def ilookup(self, name):
        """infered lookup

        return an iterator on infered values of the statements returned by
        the lookup method
        """
        frame, stmts = self.lookup(name)
        context = InferenceContext()
        return _infer_stmts(stmts, context, frame)

    def _filter_stmts(self, stmts, frame, offset):
        """filter statements to remove ignorable statements.

        If self is not a frame itself and the name is found in the inner
        frame locals, statements will be filtered to remove ignorable
        statements according to self's location
        """
        # if offset == -1, my actual frame is not the inner frame but its parent
        #
        # class A(B): pass
        #
        # we need this to resolve B correctly
        if offset == -1:
            myframe = self.frame().parent.frame()
        else:
            myframe = self.frame()
        # If the frame of this node is the same as the statement
        # of this node, then the node is part of a class or
        # a function definition and the frame of this node should be the
        # the upper frame, not the frame of the definition.
        # For more information why this is important,
        # see Pylint issue #295.
        # For example, for 'b', the statement is the same
        # as the frame / scope:
        #
        # def test(b=1):
        #     ...

        if self.statement() is myframe and myframe.parent:
            myframe = myframe.parent.frame()
        if not myframe is frame or self is frame:
            # different frame: no filtering needed
            return stmts
        mystmt = self.statement()
        # line filtering if we are in the same frame
        #
        # take care node may be missing lineno information (this is the case for
        # nodes inserted for living objects)
        if myframe is frame and mystmt.fromlineno is not None:
            assert mystmt.fromlineno is not None, mystmt
            mylineno = mystmt.fromlineno + offset
        else:
            # disabling lineno filtering
            mylineno = 0
        _stmts = []
        _stmt_parents = []
        for node in stmts:
            stmt = node.statement()
            # line filtering is on and we have reached our location, break
            if mylineno > 0 and stmt.fromlineno > mylineno:
                break
            assert hasattr(node, 'ass_type'), (node, node.scope(),
                                               node.scope().locals)
            ass_type = node.ass_type()

            if node.has_base(self):
                break

            _stmts, done = ass_type._get_filtered_stmts(self, node, _stmts, mystmt)
            if done:
                break

            optional_assign = ass_type.optional_assign
            if optional_assign and ass_type.parent_of(self):
                # we are inside a loop, loop var assigment is hidding previous
                # assigment
                _stmts = [node]
                _stmt_parents = [stmt.parent]
                continue

            # XXX comment various branches below!!!
            try:
                pindex = _stmt_parents.index(stmt.parent)
            except ValueError:
                pass
            else:
                # we got a parent index, this means the currently visited node
                # is at the same block level as a previously visited node
                if _stmts[pindex].ass_type().parent_of(ass_type):
                    # both statements are not at the same block level
                    continue
                # if currently visited node is following previously considered
                # assignement and both are not exclusive, we can drop the
                # previous one. For instance in the following code ::
                #
                #   if a:
                #     x = 1
                #   else:
                #     x = 2
                #   print x
                #
                # we can't remove neither x = 1 nor x = 2 when looking for 'x'
                # of 'print x'; while in the following ::
                #
                #   x = 1
                #   x = 2
                #   print x
                #
                # we can remove x = 1 when we see x = 2
                #
                # moreover, on loop assignment types, assignment won't
                # necessarily be done if the loop has no iteration, so we don't
                # want to clear previous assigments if any (hence the test on
                # optional_assign)
                if not (optional_assign or are_exclusive(_stmts[pindex], node)):
                    del _stmt_parents[pindex]
                    del _stmts[pindex]
            if isinstance(node, AssName):
                if not optional_assign and stmt.parent is mystmt.parent:
                    # unconditional rebinding in the same block shadows
                    # everything collected so far
                    _stmts = []
                    _stmt_parents = []
            elif isinstance(node, DelName):
                # `del name` kills all previous assignments
                _stmts = []
                _stmt_parents = []
                continue
            if not are_exclusive(self, node):
                _stmts.append(node)
                _stmt_parents.append(stmt.parent)
        return _stmts
|
||||
|
||||
# Name classes
|
||||
|
||||
class AssName(LookupMixIn, ParentAssignTypeMixin, NodeNG):
    """class representing an AssName node (a name used as an
    assignment target)"""
|
||||
|
||||
|
||||
class DelName(LookupMixIn, ParentAssignTypeMixin, NodeNG):
    """class representing a DelName node (a name used in a ``del``
    statement)"""
|
||||
|
||||
|
||||
class Name(LookupMixIn, NodeNG):
    """class representing a Name node (a plain name reference)"""
|
||||
|
||||
|
||||
|
||||
|
||||
##################### node classes ########################################
|
||||
|
||||
class Arguments(NodeNG, AssignTypeMixin):
    """class representing an Arguments node"""
    if PY3K:
        # Python 3.4+ uses a different approach regarding annotations,
        # each argument is a new class, _ast.arg, which exposes an
        # 'annotation' attribute. In astroid though, arguments are exposed
        # as is in the Arguments node and the only way to expose annotations
        # is by using something similar with Python 3.3:
        # - we expose 'varargannotation' and 'kwargannotation' of annotations
        #   of varargs and kwargs.
        # - we expose 'annotation', a list with annotations for
        #   for each normal argument. If an argument doesn't have an
        #   annotation, its value will be None.

        _astroid_fields = ('args', 'defaults', 'kwonlyargs',
                           'kw_defaults', 'annotations',
                           'varargannotation', 'kwargannotation')
        annotations = None
        varargannotation = None
        kwargannotation = None
    else:
        _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults')
    # positional argument nodes
    args = None
    # default value nodes for trailing positional arguments
    defaults = None
    # keyword-only argument nodes (py3)
    kwonlyargs = None
    # default value nodes (or None) for keyword-only arguments
    kw_defaults = None

    def __init__(self, vararg=None, kwarg=None):
        # vararg / kwarg are plain strings (the *args / **kwargs names)
        self.vararg = vararg
        self.kwarg = kwarg

    def _infer_name(self, frame, name):
        # arguments only define names in their own function's frame
        if self.parent is frame:
            return name
        return None

    @cachedproperty
    def fromlineno(self):
        # arguments can never start before their function's def line
        lineno = super(Arguments, self).fromlineno
        return max(lineno, self.parent.fromlineno or 0)

    def format_args(self):
        """return arguments formatted as string"""
        result = []
        if self.args:
            result.append(_format_args(self.args, self.defaults))
        if self.vararg:
            result.append('*%s' % self.vararg)
        if self.kwarg:
            result.append('**%s' % self.kwarg)
        if self.kwonlyargs:
            # a bare '*' separator is needed when there is no *args
            if not self.vararg:
                result.append('*')
            result.append(_format_args(self.kwonlyargs, self.kw_defaults))
        return ', '.join(result)

    def default_value(self, argname):
        """return the default value for an argument

        :raise `NoDefault`: if there is no default value defined
        """
        i = _find_arg(argname, self.args)[0]
        if i is not None:
            # defaults align with the *last* len(defaults) positional args
            idx = i - (len(self.args) - len(self.defaults))
            if idx >= 0:
                return self.defaults[idx]
        i = _find_arg(argname, self.kwonlyargs)[0]
        if i is not None and self.kw_defaults[i] is not None:
            return self.kw_defaults[i]
        raise NoDefault()

    def is_argument(self, name):
        """return True if the name is defined in arguments"""
        if name == self.vararg:
            return True
        if name == self.kwarg:
            return True
        return self.find_argname(name, True)[1] is not None

    def find_argname(self, argname, rec=False):
        """return index and Name node with given name"""
        if self.args: # self.args may be None in some cases (builtin function)
            return _find_arg(argname, self.args, rec)
        return None, None

    def get_children(self):
        """override get_children to skip over None elements in kw_defaults"""
        for child in super(Arguments, self).get_children():
            if child is not None:
                yield child
|
||||
|
||||
|
||||
def _find_arg(argname, args, rec=False):
    """Return (index, node) for the argument named `argname` in `args`,
    or (None, None) when it is absent.  When `rec` is true, recurse into
    tuple arguments (py2 tuple unpacking in signatures).
    """
    for index, arg in enumerate(args):
        if isinstance(arg, Tuple):
            if not rec:
                continue
            found = _find_arg(argname, arg.elts)
            if found[0] is not None:
                return found
        elif arg.name == argname:
            return index, arg
    return None, None
|
||||
|
||||
|
||||
def _format_args(args, defaults=None):
    """Format argument nodes (with their optional default values) as a
    comma separated string."""
    if args is None:
        return ''
    if defaults is not None:
        # defaults align with the last len(defaults) arguments
        default_offset = len(args) - len(defaults)
    formatted = []
    for index, arg in enumerate(args):
        if isinstance(arg, Tuple):
            formatted.append('(%s)' % _format_args(arg.elts))
        else:
            formatted.append(arg.name)
            if defaults is not None and index >= default_offset:
                default = defaults[index - default_offset]
                if default is not None:
                    formatted[-1] += '=' + default.as_string()
    return ', '.join(formatted)
|
||||
|
||||
|
||||
class AssAttr(NodeNG, ParentAssignTypeMixin):
    """class representing an AssAttr node (attribute used as an
    assignment target)"""
    _astroid_fields = ('expr',)
    # node the attribute is accessed on
    expr = None
|
||||
|
||||
class Assert(Statement):
    """class representing an Assert node"""
    _astroid_fields = ('test', 'fail',)
    # asserted expression
    test = None
    # optional failure message expression
    fail = None
|
||||
|
||||
class Assign(Statement, AssignTypeMixin):
    """class representing an Assign node"""
    _astroid_fields = ('targets', 'value',)
    # list of assignment target nodes
    targets = None
    # assigned value node
    value = None
|
||||
|
||||
class AugAssign(Statement, AssignTypeMixin):
    """class representing an AugAssign node (e.g. ``x += 1``)"""
    _astroid_fields = ('target', 'value',)
    # augmented assignment target node
    target = None
    # right-hand side value node
    value = None
|
||||
|
||||
class Backquote(NodeNG):
    """class representing a Backquote node (py2 `repr` backticks)"""
    _astroid_fields = ('value',)
    # quoted expression node
    value = None
|
||||
|
||||
class BinOp(NodeNG):
    """class representing a BinOp node"""
    _astroid_fields = ('left', 'right',)
    # left operand node
    left = None
    # right operand node
    right = None
|
||||
|
||||
class BoolOp(NodeNG):
    """class representing a BoolOp node (``and`` / ``or`` chains)"""
    _astroid_fields = ('values',)
    # list of operand nodes
    values = None
|
||||
|
||||
class Break(Statement):
    """class representing a Break node"""
|
||||
|
||||
|
||||
class CallFunc(NodeNG):
    """class representing a CallFunc node"""
    _astroid_fields = ('func', 'args', 'starargs', 'kwargs')
    # called expression node
    func = None
    # positional/keyword argument nodes
    args = None
    # *args node, if any
    starargs = None
    # **kwargs node, if any
    kwargs = None

    def __init__(self):
        # starargs/kwargs must be per-instance (the class-level None is
        # shared), the builder fills them in afterwards
        self.starargs = None
        self.kwargs = None
|
||||
|
||||
class Compare(NodeNG):
    """class representing a Compare node"""
    _astroid_fields = ('left', 'ops',)
    # leftmost operand node
    left = None
    # list of (operator string, comparator node) pairs
    ops = None

    def get_children(self):
        """override get_children for tuple fields"""
        yield self.left
        for _, comparator in self.ops:
            yield comparator # we don't want the 'op'

    def last_child(self):
        """override last_child"""
        # XXX maybe if self.ops:
        return self.ops[-1][1]
        #return self.left
|
||||
|
||||
class Comprehension(NodeNG):
    """class representing a Comprehension node (one ``for ... in ... if``
    clause of a comprehension / generator expression)"""
    _astroid_fields = ('target', 'iter', 'ifs')
    # loop target node
    target = None
    # iterated expression node
    iter = None
    # list of filtering ``if`` condition nodes
    ifs = None

    # the loop binding may not happen at all (empty iterable)
    optional_assign = True
    def ass_type(self):
        return self

    def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt):
        """method used in filter_stmts"""
        if self is mystmt:
            if isinstance(lookup_node, (Const, Name)):
                return [lookup_node], True

        elif self.statement() is mystmt:
            # original node's statement is the assignment, only keeps
            # current node (gen exp, list comp)

            return [node], True

        return stmts, False
|
||||
|
||||
|
||||
class Const(NodeNG, Instance):
    """represent a constant node like num, str, bool, None, bytes"""

    def __init__(self, value=None):
        # the wrapped python value
        self.value = value

    def getitem(self, index, context=None):
        # only string constants support subscription
        if isinstance(self.value, six.string_types):
            return Const(self.value[index])
        raise TypeError('%r (value=%s)' % (self, self.value))

    def has_dynamic_getattr(self):
        return False

    def itered(self):
        # only string constants are iterable
        if isinstance(self.value, six.string_types):
            return self.value
        raise TypeError()

    def pytype(self):
        return self._proxied.qname()
|
||||
|
||||
|
||||
class Continue(Statement):
    """class representing a Continue node"""
|
||||
|
||||
|
||||
class Decorators(NodeNG):
    """class representing a Decorators node"""
    _astroid_fields = ('nodes',)
    # list of decorator expression nodes
    nodes = None

    def __init__(self, nodes=None):
        self.nodes = nodes

    def scope(self):
        # skip the function node to go directly to the upper level scope
        return self.parent.parent.scope()
|
||||
|
||||
class DelAttr(NodeNG, ParentAssignTypeMixin):
    """class representing a DelAttr node (``del obj.attr``)"""
    _astroid_fields = ('expr',)
    # node the attribute is deleted from
    expr = None
|
||||
|
||||
|
||||
class Delete(Statement, AssignTypeMixin):
    """class representing a Delete node (``del`` statement)"""
    _astroid_fields = ('targets',)
    # list of deleted target nodes
    targets = None
|
||||
|
||||
|
||||
class Dict(NodeNG, Instance):
    """class representing a Dict node

    `items` is a list of (key node, value node) pairs.
    """
    _astroid_fields = ('items',)

    def __init__(self, items=None):
        if items is None:
            self.items = []
        else:
            # built from a living dict: wrap keys/values in Const nodes
            self.items = [(const_factory(k), const_factory(v))
                          for k, v in items.items()]

    def pytype(self):
        return '%s.dict' % BUILTINS

    def get_children(self):
        """get children of a Dict node"""
        # overrides get_children
        for key, value in self.items:
            yield key
            yield value

    def last_child(self):
        """override last_child"""
        if self.items:
            return self.items[-1][1]
        return None

    def itered(self):
        # Iterating a dict yields its keys.  `items` is a list of
        # (key, value) pairs, so return the key of each pair.
        # bug fix: the previous ``self.items[::2]`` returned every other
        # *pair* (a leftover from a flat key/value list representation),
        # which was wrong for a list of 2-tuples.
        return [key for (key, _) in self.items]

    def getitem(self, lookup_key, context=None):
        # return the value node whose inferred key equals lookup_key
        for key, value in self.items:
            for inferedkey in key.infer(context):
                if inferedkey is YES:
                    continue
                if isinstance(inferedkey, Const) \
                        and inferedkey.value == lookup_key:
                    return value
        # This should raise KeyError, but all call sites only catch
        # IndexError. Let's leave it like that for now.
        raise IndexError(lookup_key)
|
||||
|
||||
|
||||
class Discard(Statement):
    """class representing a Discard node (an expression statement whose
    value is discarded)"""
    _astroid_fields = ('value',)
    value = None  # the discarded expression
|
||||
|
||||
|
||||
class Ellipsis(NodeNG):  # pylint: disable=redefined-builtin
    """class representing an Ellipsis node (the ``...`` literal);
    intentionally shadows the builtin name"""
|
||||
|
||||
|
||||
class EmptyNode(NodeNG):
    """class representing an EmptyNode node: a placeholder with no
    corresponding source, used when building from living objects"""
|
||||
|
||||
|
||||
class ExceptHandler(Statement, AssignTypeMixin):
    """class representing an ExceptHandler node (one ``except`` clause)"""
    _astroid_fields = ('type', 'name', 'body',)
    type = None  # exception type expression, or None for a bare ``except:``
    name = None  # ``as`` target node, or None
    body = None

    @cachedproperty
    def blockstart_tolineno(self):
        # the clause header ends at the last of: as-name, type, keyword line
        if self.name:
            return self.name.tolineno
        elif self.type:
            return self.type.tolineno
        else:
            return self.lineno

    def catch(self, exceptions):
        """return whether this handler may catch one of the exception names
        in `exceptions`; a bare handler (or a None query) catches anything
        """
        if self.type is None or exceptions is None:
            return True
        for node in self.type.nodes_of_class(Name):
            if node.name in exceptions:
                return True
        # previously fell off the end returning None; make the negative
        # result explicit (still falsy, so callers are unaffected)
        return False
|
||||
|
||||
|
||||
class Exec(Statement):
    """class representing an Exec node (python 2 ``exec`` statement)"""
    _astroid_fields = ('expr', 'globals', 'locals',)
    expr = None     # code to execute
    globals = None  # optional globals mapping expression
    locals = None   # optional locals mapping expression
|
||||
|
||||
|
||||
class ExtSlice(NodeNG):
    """class representing an ExtSlice node (extended slice, e.g. ``x[1:2, 3]``)"""
    _astroid_fields = ('dims',)
    dims = None  # list of slice/index dimension nodes
|
||||
|
||||
class For(BlockRangeMixIn, AssignTypeMixin, Statement):
    """class representing a For node"""
    _astroid_fields = ('target', 'iter', 'body', 'orelse',)
    target = None  # assignment target of the loop
    iter = None    # iterated expression
    body = None
    orelse = None

    # the loop target may legitimately stay unbound (empty iterable)
    optional_assign = True
    @cachedproperty
    def blockstart_tolineno(self):
        # the loop header ends where the iterated expression ends
        return self.iter.tolineno
|
||||
|
||||
|
||||
class From(FromImportMixIn, Statement):
    """class representing a From node (``from <module> import <names>``)"""

    def __init__(self, fromname, names, level=0):
        # module the names are imported from (presumably None/'' for bare
        # relative imports — verify against the builder)
        self.modname = fromname
        # list of (name, asname) tuples
        self.names = names
        # number of leading dots for relative imports
        self.level = level
|
||||
|
||||
class Getattr(NodeNG):
    """class representing a Getattr node (attribute access ``expr.attr``)"""
    _astroid_fields = ('expr',)
    expr = None  # the object whose attribute is read
|
||||
|
||||
|
||||
class Global(Statement):
    """class representing a Global node"""

    def __init__(self, names):
        # names: list of identifiers declared global
        self.names = names

    def _infer_name(self, frame, name):
        # a global declaration does not rename anything
        return name
|
||||
|
||||
|
||||
class If(BlockRangeMixIn, Statement):
    """class representing an If node"""
    _astroid_fields = ('test', 'body', 'orelse')
    test = None
    body = None
    orelse = None

    @cachedproperty
    def blockstart_tolineno(self):
        # the ``if`` header ends where its test expression ends
        return self.test.tolineno

    def block_range(self, lineno):
        """handle block line numbers range for if statements"""
        first_body_line = self.body[0].fromlineno
        last_body_line = self.body[-1].tolineno
        if lineno == first_body_line:
            return lineno, lineno
        if lineno <= last_body_line:
            return lineno, last_body_line
        # past the if-body: delegate to the else/elif handling
        return self._elsed_block_range(lineno, self.orelse,
                                       first_body_line - 1)
|
||||
|
||||
|
||||
class IfExp(NodeNG):
    """class representing an IfExp node (``body if test else orelse``)"""
    _astroid_fields = ('test', 'body', 'orelse')
    test = None
    body = None
    orelse = None
|
||||
|
||||
|
||||
class Import(FromImportMixIn, Statement):
    """class representing an Import node (plain ``import`` statement)"""
|
||||
|
||||
|
||||
class Index(NodeNG):
    """class representing an Index node (simple subscript index)"""
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
|
||||
class Keyword(NodeNG):
    """class representing a Keyword node (``name=value`` in a call)"""
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
|
||||
class List(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a List node"""
    _astroid_fields = ('elts',)

    def __init__(self, elts=None):
        # wrap any raw python values into constant nodes
        if elts is None:
            self.elts = []
        else:
            self.elts = [const_factory(elt) for elt in elts]

    def pytype(self):
        """return the python builtin type name, e.g. 'builtins.list'"""
        return '%s.list' % BUILTINS

    def getitem(self, index, context=None):
        """return the element node at the given position"""
        return self.elts[index]

    def itered(self):
        """return the element nodes to iterate over"""
        return self.elts
|
||||
|
||||
|
||||
class Nonlocal(Statement):
    """class representing a Nonlocal node"""

    def __init__(self, names):
        # names: list of identifiers declared nonlocal
        self.names = names

    def _infer_name(self, frame, name):
        # a nonlocal declaration does not rename anything
        return name
|
||||
|
||||
|
||||
class Pass(Statement):
    """class representing a Pass node (``pass`` statement)"""
|
||||
|
||||
|
||||
class Print(Statement):
    """class representing a Print node (python 2 ``print`` statement)"""
    _astroid_fields = ('dest', 'values',)
    dest = None    # optional ``>>file`` destination
    values = None  # printed expressions
|
||||
|
||||
|
||||
class Raise(Statement):
    """class representing a Raise node"""
    exc = None  # raised exception expression (None for a bare ``raise``)
    if sys.version_info < (3, 0):
        # py2 form: raise exc, inst, tback
        _astroid_fields = ('exc', 'inst', 'tback')
        inst = None
        tback = None
    else:
        # py3 form: raise exc from cause
        # (removed a redundant second ``exc = None`` — it is already set above)
        _astroid_fields = ('exc', 'cause')
        cause = None

    def raises_not_implemented(self):
        """return True if the raised exception expression mentions
        NotImplementedError"""
        if not self.exc:
            return False
        for name in self.exc.nodes_of_class(Name):
            if name.name == 'NotImplementedError':
                return True
        # explicit falsy result instead of falling off the end
        return False
|
||||
|
||||
|
||||
class Return(Statement):
    """class representing a Return node"""
    _astroid_fields = ('value',)
    value = None  # returned expression, or None for a bare ``return``
|
||||
|
||||
|
||||
class Set(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a Set node"""
    _astroid_fields = ('elts',)

    def __init__(self, elts=None):
        # wrap any raw python values into constant nodes
        if elts is None:
            self.elts = []
        else:
            self.elts = [const_factory(elt) for elt in elts]

    def pytype(self):
        """return the python builtin type name, e.g. 'builtins.set'"""
        return '%s.set' % BUILTINS

    def itered(self):
        """return the element nodes to iterate over"""
        return self.elts
|
||||
|
||||
|
||||
class Slice(NodeNG):
    """class representing a Slice node (``lower:upper:step``)"""
    _astroid_fields = ('lower', 'upper', 'step')
    lower = None
    upper = None
    step = None
|
||||
|
||||
class Starred(NodeNG, ParentAssignTypeMixin):
    """class representing a Starred node (``*value`` in assignments/calls)"""
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
|
||||
class Subscript(NodeNG):
    """class representing a Subscript node (``value[slice]``)"""
    _astroid_fields = ('value', 'slice')
    value = None
    slice = None
|
||||
|
||||
|
||||
class TryExcept(BlockRangeMixIn, Statement):
    """class representing a TryExcept node"""
    _astroid_fields = ('body', 'handlers', 'orelse',)
    body = None      # statements of the try block
    handlers = None  # list of ExceptHandler nodes
    orelse = None    # statements of the else block

    def _infer_name(self, frame, name):
        # names bound here keep their own name
        return name

    def block_range(self, lineno):
        """handle block line numbers range for try/except statements"""
        last = None
        for exhandler in self.handlers:
            # line of an ``except <type>:`` header is a one-line block
            if exhandler.type and lineno == exhandler.type.fromlineno:
                return lineno, lineno
            if exhandler.body[0].fromlineno <= lineno <= exhandler.body[-1].tolineno:
                return lineno, exhandler.body[-1].tolineno
            # remember where the first handler body starts so the try-body
            # range can end just before it
            if last is None:
                last = exhandler.body[0].fromlineno - 1
        return self._elsed_block_range(lineno, self.orelse, last)
|
||||
|
||||
|
||||
class TryFinally(BlockRangeMixIn, Statement):
    """class representing a TryFinally node"""
    _astroid_fields = ('body', 'finalbody',)
    body = None
    finalbody = None

    def block_range(self, lineno):
        """handle block line numbers range for try/finally statements"""
        child = self.body[0]
        # py2.5 combined form ``try: except: finally:`` — delegate to the
        # nested TryExcept when the line falls inside it
        if (isinstance(child, TryExcept)
                and child.fromlineno == self.fromlineno
                and self.fromlineno < lineno <= child.tolineno):
            return child.block_range(lineno)
        return self._elsed_block_range(lineno, self.finalbody)
|
||||
|
||||
|
||||
class Tuple(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a Tuple node"""
    _astroid_fields = ('elts',)

    def __init__(self, elts=None):
        # wrap any raw python values into constant nodes
        if elts is None:
            self.elts = []
        else:
            self.elts = [const_factory(elt) for elt in elts]

    def pytype(self):
        """return the python builtin type name, e.g. 'builtins.tuple'"""
        return '%s.tuple' % BUILTINS

    def getitem(self, index, context=None):
        """return the element node at the given position"""
        return self.elts[index]

    def itered(self):
        """return the element nodes to iterate over"""
        return self.elts
|
||||
|
||||
|
||||
class UnaryOp(NodeNG):
    """class representing an UnaryOp node"""
    _astroid_fields = ('operand',)
    operand = None
|
||||
|
||||
|
||||
class While(BlockRangeMixIn, Statement):
    """class representing a While node"""
    _astroid_fields = ('test', 'body', 'orelse',)
    test = None
    body = None
    orelse = None

    @cachedproperty
    def blockstart_tolineno(self):
        # the loop header ends where the test expression ends
        return self.test.tolineno

    def block_range(self, lineno):
        """handle block line numbers range for while statements"""
        # (also removed a stray space in ``self. _elsed_block_range``)
        return self._elsed_block_range(lineno, self.orelse)
|
||||
|
||||
|
||||
class With(BlockRangeMixIn, AssignTypeMixin, Statement):
    """class representing a With node"""
    _astroid_fields = ('items', 'body')
    items = None  # list of (context-manager expr, optional ``as`` target)
    body = None

    @cachedproperty
    def blockstart_tolineno(self):
        # the header ends with the last context-manager expression
        return self.items[-1][0].tolineno

    def get_children(self):
        # yield each context-manager expression and its optional ``as``
        # target, then the statements of the body
        for expr, var in self.items:
            yield expr
            if var:
                yield var
        for elt in self.body:
            yield elt
|
||||
|
||||
class Yield(NodeNG):
    """class representing a Yield node"""
    _astroid_fields = ('value',)
    value = None  # yielded expression, or None for a bare ``yield``
|
||||
|
||||
class YieldFrom(Yield):
    """Class representing a YieldFrom node (``yield from`` expression)."""
|
||||
|
||||
# constants ##############################################################
|
||||
|
||||
# maps python types to the astroid node class representing a constant of
# that type; scalar types are added by _update_const_classes()
CONST_CLS = {
    list: List,
    tuple: Tuple,
    dict: Dict,
    set: Set,
    type(None): Const,
    }
|
||||
|
||||
def _update_const_classes():
    """update constant classes, so the keys of CONST_CLS can be reused"""
    scalar_types = (bool, int, float, complex, str)
    if sys.version_info < (3, 0):
        scalar_types += (unicode, long)
    if sys.version_info >= (2, 6):
        scalar_types += (bytes,)
    CONST_CLS.update((kls, Const) for kls in scalar_types)
_update_const_classes()
|
||||
|
||||
def const_factory(value):
    """return an astroid node for a python value"""
    # XXX we should probably be stricter here and only consider stuff in
    # CONST_CLS or do better treatment: in case where value is not in
    # CONST_CLS, we should rather recall the builder on this value than
    # returning an empty node (another option being that const_factory
    # shouldn't be called with something not in CONST_CLS)
    assert not isinstance(value, NodeNG)
    try:
        return CONST_CLS[value.__class__](value)
    except (KeyError, AttributeError):
        # unknown constant type: wrap the raw object in an EmptyNode
        placeholder = EmptyNode()
        placeholder.object = value
        return placeholder
|
||||
74
plugins/bundle/python-mode/pymode/libs/astroid/nodes.py
Normal file
74
plugins/bundle/python-mode/pymode/libs/astroid/nodes.py
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
on all nodes :
|
||||
.is_statement, returning true if the node should be considered as a
|
||||
statement node
|
||||
.root(), returning the root node of the tree (i.e. a Module)
|
||||
.previous_sibling(), returning previous sibling statement node
|
||||
.next_sibling(), returning next sibling statement node
|
||||
.statement(), returning the first parent node marked as statement node
|
||||
.frame(), returning the first node defining a new local scope (i.e.
|
||||
Module, Function or Class)
|
||||
.set_local(name, node), define an identifier <name> on the first parent frame,
|
||||
with the node defining it. This is used by the astroid builder and should not
|
||||
be used from out there.
|
||||
|
||||
on From and Import :
|
||||
.real_name(name),
|
||||
|
||||
|
||||
"""
|
||||
# pylint: disable=unused-import
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
from astroid.node_classes import Arguments, AssAttr, Assert, Assign, \
|
||||
AssName, AugAssign, Backquote, BinOp, BoolOp, Break, CallFunc, Compare, \
|
||||
Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, \
|
||||
Dict, Discard, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, \
|
||||
From, Getattr, Global, If, IfExp, Import, Index, Keyword, \
|
||||
List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, \
|
||||
TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, \
|
||||
const_factory
|
||||
from astroid.scoped_nodes import Module, GenExpr, Lambda, DictComp, \
|
||||
ListComp, SetComp, Function, Class
|
||||
|
||||
# every concrete astroid node class, used e.g. for nodes_of_class() queries
ALL_NODE_CLASSES = (
    Arguments, AssAttr, Assert, Assign, AssName, AugAssign,
    Backquote, BinOp, BoolOp, Break,
    CallFunc, Class, Compare, Comprehension, Const, Continue,
    Decorators, DelAttr, DelName, Delete,
    Dict, DictComp, Discard,
    Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice,
    For, From, Function,
    Getattr, GenExpr, Global,
    If, IfExp, Import, Index,
    Keyword,
    Lambda, List, ListComp,
    Name, Nonlocal,
    Module,
    Pass, Print,
    Raise, Return,
    Set, SetComp, Slice, Starred, Subscript,
    TryExcept, TryFinally, Tuple,
    UnaryOp,
    While, With,
    Yield, YieldFrom
    )
|
||||
|
||||
415
plugins/bundle/python-mode/pymode/libs/astroid/protocols.py
Normal file
415
plugins/bundle/python-mode/pymode/libs/astroid/protocols.py
Normal file
|
|
@ -0,0 +1,415 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains a set of functions to handle python protocols for nodes
|
||||
where it makes sense.
|
||||
"""
|
||||
|
||||
__doctype__ = "restructuredtext en"
|
||||
import collections
|
||||
|
||||
from astroid.exceptions import InferenceError, NoDefault, NotFoundError
|
||||
from astroid.node_classes import unpack_infer
|
||||
from astroid.bases import InferenceContext, copy_context, \
|
||||
raise_if_nothing_infered, yes_if_nothing_infered, Instance, YES
|
||||
from astroid.nodes import const_factory
|
||||
from astroid import nodes
|
||||
|
||||
# maps a binary operator to the special method implementing it
BIN_OP_METHOD = {'+': '__add__',
                 '-': '__sub__',
                 '/': '__div__',
                 '//': '__floordiv__',
                 '*': '__mul__',
                 # BUG FIX: '**' previously mapped to '__power__', which is
                 # not a real special method; the data-model hook is __pow__
                 '**': '__pow__',
                 '%': '__mod__',
                 '&': '__and__',
                 '|': '__or__',
                 '^': '__xor__',
                 '<<': '__lshift__',
                 '>>': '__rshift__',
                 }
|
||||
|
||||
# maps a unary operator to the special method implementing it
UNARY_OP_METHOD = {'+': '__pos__',
                   '-': '__neg__',
                   '~': '__invert__',
                   'not': None, # XXX not '__nonzero__'
                   }
|
||||
|
||||
# unary operations ############################################################
|
||||
|
||||
def tl_infer_unary_op(self, operator):
    """unary-operator support shared by Tuple and List nodes"""
    if operator != 'not':
        raise TypeError() # XXX log unsupported operation
    return const_factory(not bool(self.elts))
nodes.Tuple.infer_unary_op = tl_infer_unary_op
nodes.List.infer_unary_op = tl_infer_unary_op
|
||||
|
||||
|
||||
def dict_infer_unary_op(self, operator):
    """unary-operator support for Dict nodes"""
    if operator != 'not':
        raise TypeError() # XXX log unsupported operation
    return const_factory(not bool(self.items))
nodes.Dict.infer_unary_op = dict_infer_unary_op
|
||||
|
||||
|
||||
def const_infer_unary_op(self, operator):
    """unary-operator support for Const nodes"""
    if operator == 'not':
        return const_factory(not self.value)
    # XXX log potentially raised TypeError
    if operator == '+':
        return const_factory(+self.value)
    # only remaining operator is '-'
    return const_factory(-self.value)
nodes.Const.infer_unary_op = const_infer_unary_op
|
||||
|
||||
|
||||
# binary operations ###########################################################
|
||||
|
||||
# concrete implementations used to fold constant binary operations
BIN_OP_IMPL = {'+': lambda a, b: a + b,
               '-': lambda a, b: a - b,
               '/': lambda a, b: a / b,
               '//': lambda a, b: a // b,
               '*': lambda a, b: a * b,
               '**': lambda a, b: a ** b,
               '%': lambda a, b: a % b,
               '&': lambda a, b: a & b,
               '|': lambda a, b: a | b,
               '^': lambda a, b: a ^ b,
               '<<': lambda a, b: a << b,
               '>>': lambda a, b: a >> b,
               }
# augmented forms ('+=', '-=', ...) share the plain implementations
for key, impl in [(op, fn) for op, fn in BIN_OP_IMPL.items()]:
    BIN_OP_IMPL[key + '='] = impl
|
||||
|
||||
def const_infer_binary_op(self, operator, other, context):
    """infer the result of <Const> <operator> <other>"""
    # iterate every possible value of the right-hand operand
    for other in other.infer(context):
        if isinstance(other, nodes.Const):
            try:
                impl = BIN_OP_IMPL[operator]

                try:
                    # fold the operation on the concrete python values
                    yield const_factory(impl(self.value, other.value))
                except Exception:
                    # ArithmeticError is not enough: float >> float is a TypeError
                    # TODO : let pylint know about the problem
                    pass
            except TypeError:
                # XXX log TypeError
                # NOTE(review): the dict lookup above raises KeyError, not
                # TypeError — this handler looks unreachable; confirm
                continue
        elif other is YES:
            yield other
        else:
            # non-const rhs: let the other operand compute the result
            try:
                for val in other.infer_binary_op(operator, self, context):
                    yield val
            except AttributeError:
                yield YES
nodes.Const.infer_binary_op = yes_if_nothing_infered(const_infer_binary_op)
|
||||
|
||||
|
||||
def tl_infer_binary_op(self, operator, other, context):
    """infer the result of <Tuple/List> <operator> <other>"""
    for other in other.infer(context):
        if isinstance(other, self.__class__) and operator == '+':
            # sequence concatenation: new node with both element lists,
            # dropping elements that infer to YES
            node = self.__class__()
            elts = [n for elt in self.elts for n in elt.infer(context)
                    if not n is YES]
            elts += [n for elt in other.elts for n in elt.infer(context)
                     if not n is YES]
            node.elts = elts
            yield node
        elif isinstance(other, nodes.Const) and operator == '*':
            # sequence repetition: only meaningful with an int multiplier
            if not isinstance(other.value, int):
                yield YES
                continue
            node = self.__class__()
            elts = [n for elt in self.elts for n in elt.infer(context)
                    if not n is YES] * other.value
            node.elts = elts
            yield node
        elif isinstance(other, Instance) and not isinstance(other, nodes.Const):
            # arbitrary instance on the rhs: result unknown
            yield YES
    # XXX else log TypeError
nodes.Tuple.infer_binary_op = yes_if_nothing_infered(tl_infer_binary_op)
nodes.List.infer_binary_op = yes_if_nothing_infered(tl_infer_binary_op)
|
||||
|
||||
|
||||
def dict_infer_binary_op(self, operator, other, context):
    """infer the result of <Dict> <operator> <other>"""
    for rhs in other.infer(context):
        # dict <op> instance-of-a-class: result unknown
        if isinstance(rhs, Instance) and isinstance(rhs._proxied, nodes.Class):
            yield YES
    # XXX else log TypeError
nodes.Dict.infer_binary_op = yes_if_nothing_infered(dict_infer_binary_op)
|
||||
|
||||
def instance_infer_binary_op(self, operator, other, context):
    """infer the result of <Instance> <operator> <other> by calling the
    instance's corresponding special method"""
    try:
        methods = self.getattr(BIN_OP_METHOD[operator])
    except (NotFoundError, KeyError):
        # Unknown operator
        yield YES
    else:
        for method in methods:
            if not isinstance(method, nodes.Function):
                continue
            for result in method.infer_call_result(self, context):
                if result is not YES:
                    yield result
            # We are interested only in the first infered method,
            # don't go looking in the rest of the methods of the ancestors.
            break

Instance.infer_binary_op = yes_if_nothing_infered(instance_infer_binary_op)
|
||||
|
||||
|
||||
# assignment ##################################################################
|
||||
|
||||
"""the assigned_stmts method is responsible to return the assigned statement
|
||||
(e.g. not inferred) according to the assignment type.
|
||||
|
||||
The `asspath` argument is used to record the lhs path of the original node.
|
||||
For instance if we want assigned statements for 'c' in 'a, (b,c)', asspath
|
||||
will be [1, 1] once arrived to the Assign node.
|
||||
|
||||
The `context` argument is the current inference context which should be given
|
||||
to any intermediary inference necessary.
|
||||
"""
|
||||
|
||||
def _resolve_looppart(parts, asspath, context):
    """recursive function to resolve multiple assignments on loops

    `parts` are the inferred values of the iterated expression; `asspath`
    records the index path of the unpacked target being resolved.
    """
    asspath = asspath[:]
    index = asspath.pop(0)
    for part in parts:
        if part is YES:
            continue
        # XXX handle __iter__ and log potentially detected errors
        if not hasattr(part, 'itered'):
            continue
        try:
            itered = part.itered()
        except TypeError:
            continue # XXX log error
        for stmt in itered:
            try:
                assigned = stmt.getitem(index, context)
            except (AttributeError, IndexError):
                continue
            except TypeError: # stmt is unsubscriptable Const
                continue
            if not asspath:
                # the assignment path is fully resolved;
                # don't infer the last part
                yield assigned
            elif assigned is YES:
                break
            else:
                # we are not yet on the last part of the path
                # search on each possibly inferred value
                try:
                    for infered in _resolve_looppart(assigned.infer(context),
                                                     asspath, context):
                        yield infered
                except InferenceError:
                    break
|
||||
|
||||
|
||||
def for_assigned_stmts(self, node, context=None, asspath=None):
    """return the statements assigned to a For (or Comprehension) target"""
    if asspath is None:
        # simple target: yield each element of any inferred tuple/list
        for iterable in self.iter.infer(context):
            if isinstance(iterable, (nodes.Tuple, nodes.List)):
                for elt in iterable.elts:
                    yield elt
    else:
        # nested (unpacking) target: walk down the assignment path
        for infered in _resolve_looppart(self.iter.infer(context),
                                         asspath, context):
            yield infered

nodes.For.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts)
nodes.Comprehension.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts)
|
||||
|
||||
|
||||
def mulass_assigned_stmts(self, node, context=None, asspath=None):
    """record this node's position inside the lhs tuple/list and
    delegate resolution to the parent node"""
    asspath = [] if asspath is None else asspath
    asspath.insert(0, self.elts.index(node))
    return self.parent.assigned_stmts(self, context, asspath)
nodes.Tuple.assigned_stmts = mulass_assigned_stmts
nodes.List.assigned_stmts = mulass_assigned_stmts
|
||||
|
||||
|
||||
def assend_assigned_stmts(self, context=None):
    # an assignment end-point (AssName/AssAttr) delegates to its parent,
    # which knows what is being assigned
    return self.parent.assigned_stmts(self, context=context)
nodes.AssName.assigned_stmts = assend_assigned_stmts
nodes.AssAttr.assigned_stmts = assend_assigned_stmts
|
||||
|
||||
|
||||
def _arguments_infer_argname(self, name, context):
    """infer the possible values of the argument `name` when the call
    context is unknown"""
    # arguments information may be missing, in which case we can't do anything
    # more
    if not (self.args or self.vararg or self.kwarg):
        yield YES
        return
    # first argument of instance/class method
    if self.args and getattr(self.args[0], 'name', None) == name:
        functype = self.parent.type
        if functype == 'method':
            # ``self``: an instance of the enclosing class
            yield Instance(self.parent.parent.frame())
            return
        if functype == 'classmethod':
            # ``cls``: the enclosing class itself
            yield self.parent.parent.frame()
            return
    if name == self.vararg:
        # *args defaults to an empty tuple
        vararg = const_factory(())
        vararg.parent = self
        yield vararg
        return
    if name == self.kwarg:
        # **kwargs defaults to an empty dict
        kwarg = const_factory({})
        kwarg.parent = self
        yield kwarg
        return
    # if there is a default value, yield it. And then yield YES to reflect
    # we can't guess given argument value
    try:
        context = copy_context(context)
        for infered in self.default_value(name).infer(context):
            yield infered
        yield YES
    except NoDefault:
        yield YES
|
||||
|
||||
|
||||
def arguments_assigned_stmts(self, node, context, asspath=None):
    """infer the value bound to the function argument `node`"""
    callcontext = context.callcontext
    if not callcontext:
        # not inferring through a concrete call: use defaults/self/cls
        return _arguments_infer_argname(self, node.name, context)
    # reset call context/name so the argument itself infers cleanly
    context = copy_context(context)
    context.callcontext = None
    return callcontext.infer_argument(self.parent, node.name, context)
nodes.Arguments.assigned_stmts = arguments_assigned_stmts
|
||||
|
||||
|
||||
def assign_assigned_stmts(self, node, context=None, asspath=None):
    """yield the rhs of the assignment, or the part of it selected by asspath"""
    if asspath:
        for infered in _resolve_asspart(self.value.infer(context),
                                        asspath, context):
            yield infered
    else:
        yield self.value
nodes.Assign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts)
nodes.AugAssign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts)
|
||||
|
||||
|
||||
def _resolve_asspart(parts, asspath, context):
    """recursive function to resolve multiple assignments

    `parts` are the inferred values of the rhs; `asspath` records the index
    path of the unpacked target being resolved.
    """
    asspath = asspath[:]
    index = asspath.pop(0)
    for part in parts:
        if hasattr(part, 'getitem'):
            try:
                assigned = part.getitem(index, context)
            # XXX raise a specific exception to avoid potential hiding of
            # unexpected exception ?
            except (TypeError, IndexError):
                return
            if not asspath:
                # the assignment path is fully resolved; don't infer the
                # last part
                yield assigned
            elif assigned is YES:
                return
            else:
                # we are not yet on the last part of the path search on each
                # possibly inferred value
                try:
                    for infered in _resolve_asspart(assigned.infer(context),
                                                    asspath, context):
                        yield infered
                except InferenceError:
                    return
|
||||
|
||||
|
||||
def excepthandler_assigned_stmts(self, node, context=None, asspath=None):
    """yield what ``except SomeError as name`` binds: an Instance of each
    exception class the handler may catch"""
    for assigned in unpack_infer(self.type):
        if isinstance(assigned, nodes.Class):
            yield Instance(assigned)
        else:
            yield assigned
nodes.ExceptHandler.assigned_stmts = raise_if_nothing_infered(excepthandler_assigned_stmts)
|
||||
|
||||
|
||||
def with_assigned_stmts(self, node, context=None, asspath=None):
    """yield the values bound by ``with ... as <target>``"""
    if asspath is None:
        for _, vars in self.items:
            if vars is None:
                continue
            for lst in vars.infer(context):
                if isinstance(lst, (nodes.Tuple, nodes.List)):
                    # BUG FIX: Tuple/List store their children in ``elts``;
                    # they have no ``nodes`` attribute, so the previous
                    # ``lst.nodes`` raised AttributeError
                    for item in lst.elts:
                        yield item
nodes.With.assigned_stmts = raise_if_nothing_infered(with_assigned_stmts)
|
||||
|
||||
|
||||
def starred_assigned_stmts(self, node=None, context=None, asspath=None):
    """infer the values a Starred target (``*name``) captures in an
    unpacking assignment"""
    stmt = self.statement()
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise InferenceError()

    # NOTE(review): only the Assign case is handled below; for a For
    # statement this generator yields nothing — confirm intended
    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]

        if sum(1 for node in lhs.nodes_of_class(nodes.Starred)) > 1:
            # Too many starred arguments in the expression.
            raise InferenceError()

        if context is None:
            context = InferenceContext()
        try:
            rhs = next(value.infer(context))
        except InferenceError:
            yield YES
            return
        if rhs is YES or not hasattr(rhs, 'elts'):
            # Not interested in inferred values without elts.
            yield YES
            return

        elts = collections.deque(rhs.elts[:])
        if len(lhs.elts) > len(rhs.elts):
            # a, *b, c = (1, 2)
            raise InferenceError()

        # Unpack iteratively the values from the rhs of the assignment,
        # until we find the starred node. What remains will be the list of
        # values which the Starred node will represent.
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.

        for index, node in enumerate(lhs.elts):
            if not isinstance(node, nodes.Starred):
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for node in lhs_elts:
                if not isinstance(node, nodes.Starred):
                    elts.pop()
                    continue
                # We're done
                for elt in elts:
                    yield elt
                break

nodes.Starred.assigned_stmts = starred_assigned_stmts
|
||||
366
plugins/bundle/python-mode/pymode/libs/astroid/raw_building.py
Normal file
366
plugins/bundle/python-mode/pymode/libs/astroid/raw_building.py
Normal file
|
|
@ -0,0 +1,366 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains a set of functions to create astroid trees from scratch
|
||||
(build_* functions) or from living object (object_build_* functions)
|
||||
"""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
from os.path import abspath
|
||||
from inspect import (getargspec, isdatadescriptor, isfunction, ismethod,
|
||||
ismethoddescriptor, isclass, isbuiltin, ismodule)
|
||||
import six
|
||||
|
||||
from astroid.node_classes import CONST_CLS
|
||||
from astroid.nodes import (Module, Class, Const, const_factory, From,
|
||||
Function, EmptyNode, Name, Arguments)
|
||||
from astroid.bases import BUILTINS, Generator
|
||||
from astroid.manager import AstroidManager
|
||||
MANAGER = AstroidManager()
|
||||
|
||||
_CONSTANTS = tuple(CONST_CLS) # the keys of CONST_CLS eg python builtin types
|
||||
|
||||
def _io_discrepancy(member):
|
||||
# _io module names itself `io`: http://bugs.python.org/issue18602
|
||||
member_self = getattr(member, '__self__', None)
|
||||
return (member_self and
|
||||
ismodule(member_self) and
|
||||
member_self.__name__ == '_io' and
|
||||
member.__module__ == 'io')
|
||||
|
||||
def _attach_local_node(parent, node, name):
|
||||
node.name = name # needed by add_local_node
|
||||
parent.add_local_node(node)
|
||||
|
||||
_marker = object()
|
||||
|
||||
def attach_dummy_node(node, name, object=_marker):
|
||||
"""create a dummy node and register it in the locals of the given
|
||||
node with the specified name
|
||||
"""
|
||||
enode = EmptyNode()
|
||||
enode.object = object
|
||||
_attach_local_node(node, enode, name)
|
||||
|
||||
def _has_underlying_object(self):
|
||||
return hasattr(self, 'object') and self.object is not _marker
|
||||
|
||||
EmptyNode.has_underlying_object = _has_underlying_object
|
||||
|
||||
def attach_const_node(node, name, value):
|
||||
"""create a Const node and register it in the locals of the given
|
||||
node with the specified name
|
||||
"""
|
||||
if not name in node.special_attributes:
|
||||
_attach_local_node(node, const_factory(value), name)
|
||||
|
||||
def attach_import_node(node, modname, membername):
|
||||
"""create a From node and register it in the locals of the given
|
||||
node with the specified name
|
||||
"""
|
||||
from_node = From(modname, [(membername, None)])
|
||||
_attach_local_node(node, from_node, membername)
|
||||
|
||||
|
||||
def build_module(name, doc=None):
|
||||
"""create and initialize a astroid Module node"""
|
||||
node = Module(name, doc, pure_python=False)
|
||||
node.package = False
|
||||
node.parent = None
|
||||
return node
|
||||
|
||||
def build_class(name, basenames=(), doc=None):
|
||||
"""create and initialize a astroid Class node"""
|
||||
node = Class(name, doc)
|
||||
for base in basenames:
|
||||
basenode = Name()
|
||||
basenode.name = base
|
||||
node.bases.append(basenode)
|
||||
basenode.parent = node
|
||||
return node
|
||||
|
||||
def build_function(name, args=None, defaults=None, flag=0, doc=None):
|
||||
"""create and initialize a astroid Function node"""
|
||||
args, defaults = args or [], defaults or []
|
||||
# first argument is now a list of decorators
|
||||
func = Function(name, doc)
|
||||
func.args = argsnode = Arguments()
|
||||
argsnode.args = []
|
||||
for arg in args:
|
||||
argsnode.args.append(Name())
|
||||
argsnode.args[-1].name = arg
|
||||
argsnode.args[-1].parent = argsnode
|
||||
argsnode.defaults = []
|
||||
for default in defaults:
|
||||
argsnode.defaults.append(const_factory(default))
|
||||
argsnode.defaults[-1].parent = argsnode
|
||||
argsnode.kwarg = None
|
||||
argsnode.vararg = None
|
||||
argsnode.parent = func
|
||||
if args:
|
||||
register_arguments(func)
|
||||
return func
|
||||
|
||||
|
||||
def build_from_import(fromname, names):
|
||||
"""create and initialize an astroid From import statement"""
|
||||
return From(fromname, [(name, None) for name in names])
|
||||
|
||||
def register_arguments(func, args=None):
|
||||
"""add given arguments to local
|
||||
|
||||
args is a list that may contains nested lists
|
||||
(i.e. def func(a, (b, c, d)): ...)
|
||||
"""
|
||||
if args is None:
|
||||
args = func.args.args
|
||||
if func.args.vararg:
|
||||
func.set_local(func.args.vararg, func.args)
|
||||
if func.args.kwarg:
|
||||
func.set_local(func.args.kwarg, func.args)
|
||||
for arg in args:
|
||||
if isinstance(arg, Name):
|
||||
func.set_local(arg.name, arg)
|
||||
else:
|
||||
register_arguments(func, arg.elts)
|
||||
|
||||
def object_build_class(node, member, localname):
|
||||
"""create astroid for a living class object"""
|
||||
basenames = [base.__name__ for base in member.__bases__]
|
||||
return _base_class_object_build(node, member, basenames,
|
||||
localname=localname)
|
||||
|
||||
def object_build_function(node, member, localname):
|
||||
"""create astroid for a living function object"""
|
||||
args, varargs, varkw, defaults = getargspec(member)
|
||||
if varargs is not None:
|
||||
args.append(varargs)
|
||||
if varkw is not None:
|
||||
args.append(varkw)
|
||||
func = build_function(getattr(member, '__name__', None) or localname, args,
|
||||
defaults, six.get_function_code(member).co_flags, member.__doc__)
|
||||
node.add_local_node(func, localname)
|
||||
|
||||
def object_build_datadescriptor(node, member, name):
|
||||
"""create astroid for a living data descriptor object"""
|
||||
return _base_class_object_build(node, member, [], name)
|
||||
|
||||
def object_build_methoddescriptor(node, member, localname):
|
||||
"""create astroid for a living method descriptor object"""
|
||||
# FIXME get arguments ?
|
||||
func = build_function(getattr(member, '__name__', None) or localname,
|
||||
doc=member.__doc__)
|
||||
# set node's arguments to None to notice that we have no information, not
|
||||
# and empty argument list
|
||||
func.args.args = None
|
||||
node.add_local_node(func, localname)
|
||||
|
||||
def _base_class_object_build(node, member, basenames, name=None, localname=None):
|
||||
"""create astroid for a living class object, with a given set of base names
|
||||
(e.g. ancestors)
|
||||
"""
|
||||
klass = build_class(name or getattr(member, '__name__', None) or localname,
|
||||
basenames, member.__doc__)
|
||||
klass._newstyle = isinstance(member, type)
|
||||
node.add_local_node(klass, localname)
|
||||
try:
|
||||
# limit the instantiation trick since it's too dangerous
|
||||
# (such as infinite test execution...)
|
||||
# this at least resolves common case such as Exception.args,
|
||||
# OSError.errno
|
||||
if issubclass(member, Exception):
|
||||
instdict = member().__dict__
|
||||
else:
|
||||
raise TypeError
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
for name, obj in instdict.items():
|
||||
valnode = EmptyNode()
|
||||
valnode.object = obj
|
||||
valnode.parent = klass
|
||||
valnode.lineno = 1
|
||||
klass.instance_attrs[name] = [valnode]
|
||||
return klass
|
||||
|
||||
|
||||
|
||||
|
||||
class InspectBuilder(object):
|
||||
"""class for building nodes from living object
|
||||
|
||||
this is actually a really minimal representation, including only Module,
|
||||
Function and Class nodes and some others as guessed.
|
||||
"""
|
||||
|
||||
# astroid from living objects ###############################################
|
||||
|
||||
def __init__(self):
|
||||
self._done = {}
|
||||
self._module = None
|
||||
|
||||
def inspect_build(self, module, modname=None, path=None):
|
||||
"""build astroid from a living module (i.e. using inspect)
|
||||
this is used when there is no python source code available (either
|
||||
because it's a built-in module or because the .py is not available)
|
||||
"""
|
||||
self._module = module
|
||||
if modname is None:
|
||||
modname = module.__name__
|
||||
try:
|
||||
node = build_module(modname, module.__doc__)
|
||||
except AttributeError:
|
||||
# in jython, java modules have no __doc__ (see #109562)
|
||||
node = build_module(modname)
|
||||
node.file = node.path = path and abspath(path) or path
|
||||
node.name = modname
|
||||
MANAGER.cache_module(node)
|
||||
node.package = hasattr(module, '__path__')
|
||||
self._done = {}
|
||||
self.object_build(node, module)
|
||||
return node
|
||||
|
||||
def object_build(self, node, obj):
|
||||
"""recursive method which create a partial ast from real objects
|
||||
(only function, class, and method are handled)
|
||||
"""
|
||||
if obj in self._done:
|
||||
return self._done[obj]
|
||||
self._done[obj] = node
|
||||
for name in dir(obj):
|
||||
try:
|
||||
member = getattr(obj, name)
|
||||
except AttributeError:
|
||||
# damned ExtensionClass.Base, I know you're there !
|
||||
attach_dummy_node(node, name)
|
||||
continue
|
||||
if ismethod(member):
|
||||
member = six.get_method_function(member)
|
||||
if isfunction(member):
|
||||
# verify this is not an imported function
|
||||
filename = getattr(six.get_function_code(member),
|
||||
'co_filename', None)
|
||||
if filename is None:
|
||||
assert isinstance(member, object)
|
||||
object_build_methoddescriptor(node, member, name)
|
||||
elif filename != getattr(self._module, '__file__', None):
|
||||
attach_dummy_node(node, name, member)
|
||||
else:
|
||||
object_build_function(node, member, name)
|
||||
elif isbuiltin(member):
|
||||
if (not _io_discrepancy(member) and
|
||||
self.imported_member(node, member, name)):
|
||||
continue
|
||||
object_build_methoddescriptor(node, member, name)
|
||||
elif isclass(member):
|
||||
if self.imported_member(node, member, name):
|
||||
continue
|
||||
if member in self._done:
|
||||
class_node = self._done[member]
|
||||
if not class_node in node.locals.get(name, ()):
|
||||
node.add_local_node(class_node, name)
|
||||
else:
|
||||
class_node = object_build_class(node, member, name)
|
||||
# recursion
|
||||
self.object_build(class_node, member)
|
||||
if name == '__class__' and class_node.parent is None:
|
||||
class_node.parent = self._done[self._module]
|
||||
elif ismethoddescriptor(member):
|
||||
assert isinstance(member, object)
|
||||
object_build_methoddescriptor(node, member, name)
|
||||
elif isdatadescriptor(member):
|
||||
assert isinstance(member, object)
|
||||
object_build_datadescriptor(node, member, name)
|
||||
elif type(member) in _CONSTANTS:
|
||||
attach_const_node(node, name, member)
|
||||
else:
|
||||
# create an empty node so that the name is actually defined
|
||||
attach_dummy_node(node, name, member)
|
||||
|
||||
def imported_member(self, node, member, name):
|
||||
"""verify this is not an imported class or handle it"""
|
||||
# /!\ some classes like ExtensionClass doesn't have a __module__
|
||||
# attribute ! Also, this may trigger an exception on badly built module
|
||||
# (see http://www.logilab.org/ticket/57299 for instance)
|
||||
try:
|
||||
modname = getattr(member, '__module__', None)
|
||||
except:
|
||||
# XXX use logging
|
||||
print('unexpected error while building astroid from living object')
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
modname = None
|
||||
if modname is None:
|
||||
if name in ('__new__', '__subclasshook__'):
|
||||
# Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14)
|
||||
# >>> print object.__new__.__module__
|
||||
# None
|
||||
modname = BUILTINS
|
||||
else:
|
||||
attach_dummy_node(node, name, member)
|
||||
return True
|
||||
if {'gtk': 'gtk._gtk'}.get(modname, modname) != self._module.__name__:
|
||||
# check if it sounds valid and then add an import node, else use a
|
||||
# dummy node
|
||||
try:
|
||||
getattr(sys.modules[modname], name)
|
||||
except (KeyError, AttributeError):
|
||||
attach_dummy_node(node, name, member)
|
||||
else:
|
||||
attach_import_node(node, modname, name)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
### astroid bootstrapping ######################################################
|
||||
Astroid_BUILDER = InspectBuilder()
|
||||
|
||||
_CONST_PROXY = {}
|
||||
def _astroid_bootstrapping(astroid_builtin=None):
|
||||
"""astroid boot strapping the builtins module"""
|
||||
# this boot strapping is necessary since we need the Const nodes to
|
||||
# inspect_build builtins, and then we can proxy Const
|
||||
if astroid_builtin is None:
|
||||
from logilab.common.compat import builtins
|
||||
astroid_builtin = Astroid_BUILDER.inspect_build(builtins)
|
||||
|
||||
for cls, node_cls in CONST_CLS.items():
|
||||
if cls is type(None):
|
||||
proxy = build_class('NoneType')
|
||||
proxy.parent = astroid_builtin
|
||||
else:
|
||||
proxy = astroid_builtin.getattr(cls.__name__)[0]
|
||||
if cls in (dict, list, set, tuple):
|
||||
node_cls._proxied = proxy
|
||||
else:
|
||||
_CONST_PROXY[cls] = proxy
|
||||
|
||||
_astroid_bootstrapping()
|
||||
|
||||
# TODO : find a nicer way to handle this situation;
|
||||
# However __proxied introduced an
|
||||
# infinite recursion (see https://bugs.launchpad.net/pylint/+bug/456870)
|
||||
def _set_proxied(const):
|
||||
return _CONST_PROXY[const.value.__class__]
|
||||
Const._proxied = property(_set_proxied)
|
||||
|
||||
from types import GeneratorType
|
||||
Generator._proxied = Class(GeneratorType.__name__, GeneratorType.__doc__)
|
||||
Astroid_BUILDER.object_build(Generator._proxied, GeneratorType)
|
||||
|
||||
926
plugins/bundle/python-mode/pymode/libs/astroid/rebuilder.py
Normal file
926
plugins/bundle/python-mode/pymode/libs/astroid/rebuilder.py
Normal file
|
|
@ -0,0 +1,926 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains utilities for rebuilding a _ast tree in
|
||||
order to get a single Astroid representation
|
||||
"""
|
||||
|
||||
import sys
|
||||
from _ast import (
|
||||
Expr as Discard, Str,
|
||||
# binary operators
|
||||
Add, BinOp, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor,
|
||||
LShift, RShift,
|
||||
# logical operators
|
||||
And, Or,
|
||||
# unary operators
|
||||
UAdd, USub, Not, Invert,
|
||||
# comparison operators
|
||||
Eq, Gt, GtE, In, Is, IsNot, Lt, LtE, NotEq, NotIn,
|
||||
)
|
||||
|
||||
from astroid import nodes as new
|
||||
from astroid import astpeephole
|
||||
|
||||
|
||||
_BIN_OP_CLASSES = {Add: '+',
|
||||
BitAnd: '&',
|
||||
BitOr: '|',
|
||||
BitXor: '^',
|
||||
Div: '/',
|
||||
FloorDiv: '//',
|
||||
Mod: '%',
|
||||
Mult: '*',
|
||||
Pow: '**',
|
||||
Sub: '-',
|
||||
LShift: '<<',
|
||||
RShift: '>>',
|
||||
}
|
||||
|
||||
_BOOL_OP_CLASSES = {And: 'and',
|
||||
Or: 'or',
|
||||
}
|
||||
|
||||
_UNARY_OP_CLASSES = {UAdd: '+',
|
||||
USub: '-',
|
||||
Not: 'not',
|
||||
Invert: '~',
|
||||
}
|
||||
|
||||
_CMP_OP_CLASSES = {Eq: '==',
|
||||
Gt: '>',
|
||||
GtE: '>=',
|
||||
In: 'in',
|
||||
Is: 'is',
|
||||
IsNot: 'is not',
|
||||
Lt: '<',
|
||||
LtE: '<=',
|
||||
NotEq: '!=',
|
||||
NotIn: 'not in',
|
||||
}
|
||||
|
||||
CONST_NAME_TRANSFORMS = {'None': None,
|
||||
'True': True,
|
||||
'False': False,
|
||||
}
|
||||
|
||||
REDIRECT = {'arguments': 'Arguments',
|
||||
'Attribute': 'Getattr',
|
||||
'comprehension': 'Comprehension',
|
||||
'Call': 'CallFunc',
|
||||
'ClassDef': 'Class',
|
||||
"ListCompFor": 'Comprehension',
|
||||
"GenExprFor": 'Comprehension',
|
||||
'excepthandler': 'ExceptHandler',
|
||||
'Expr': 'Discard',
|
||||
'FunctionDef': 'Function',
|
||||
'GeneratorExp': 'GenExpr',
|
||||
'ImportFrom': 'From',
|
||||
'keyword': 'Keyword',
|
||||
'Repr': 'Backquote',
|
||||
}
|
||||
PY3K = sys.version_info >= (3, 0)
|
||||
PY34 = sys.version_info >= (3, 4)
|
||||
|
||||
def _init_set_doc(node, newnode):
|
||||
newnode.doc = None
|
||||
try:
|
||||
if isinstance(node.body[0], Discard) and isinstance(node.body[0].value, Str):
|
||||
newnode.doc = node.body[0].value.s
|
||||
node.body = node.body[1:]
|
||||
|
||||
except IndexError:
|
||||
pass # ast built from scratch
|
||||
|
||||
def _lineno_parent(oldnode, newnode, parent):
|
||||
newnode.parent = parent
|
||||
newnode.lineno = oldnode.lineno
|
||||
newnode.col_offset = oldnode.col_offset
|
||||
|
||||
def _set_infos(oldnode, newnode, parent):
|
||||
newnode.parent = parent
|
||||
if hasattr(oldnode, 'lineno'):
|
||||
newnode.lineno = oldnode.lineno
|
||||
if hasattr(oldnode, 'col_offset'):
|
||||
newnode.col_offset = oldnode.col_offset
|
||||
|
||||
def _create_yield_node(node, parent, rebuilder, factory):
|
||||
newnode = factory()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
if node.value is not None:
|
||||
newnode.value = rebuilder.visit(node.value, newnode)
|
||||
return newnode
|
||||
|
||||
|
||||
class TreeRebuilder(object):
|
||||
"""Rebuilds the _ast tree to become an Astroid tree"""
|
||||
|
||||
def __init__(self, manager):
|
||||
self._manager = manager
|
||||
self.asscontext = None
|
||||
self._global_names = []
|
||||
self._from_nodes = []
|
||||
self._delayed_assattr = []
|
||||
self._visit_meths = {}
|
||||
self._transform = manager.transform
|
||||
self._peepholer = astpeephole.ASTPeepholeOptimizer()
|
||||
|
||||
def visit_module(self, node, modname, modpath, package):
|
||||
"""visit a Module node by returning a fresh instance of it"""
|
||||
newnode = new.Module(modname, None)
|
||||
newnode.package = package
|
||||
newnode.parent = None
|
||||
_init_set_doc(node, newnode)
|
||||
newnode.body = [self.visit(child, newnode) for child in node.body]
|
||||
newnode.file = newnode.path = modpath
|
||||
return self._transform(newnode)
|
||||
|
||||
def visit(self, node, parent):
|
||||
cls = node.__class__
|
||||
if cls in self._visit_meths:
|
||||
visit_method = self._visit_meths[cls]
|
||||
else:
|
||||
cls_name = cls.__name__
|
||||
visit_name = 'visit_' + REDIRECT.get(cls_name, cls_name).lower()
|
||||
visit_method = getattr(self, visit_name)
|
||||
self._visit_meths[cls] = visit_method
|
||||
return self._transform(visit_method(node, parent))
|
||||
|
||||
def _save_assignment(self, node, name=None):
|
||||
"""save assignement situation since node.parent is not available yet"""
|
||||
if self._global_names and node.name in self._global_names[-1]:
|
||||
node.root().set_local(node.name, node)
|
||||
else:
|
||||
node.parent.set_local(node.name, node)
|
||||
|
||||
|
||||
def visit_arguments(self, node, parent):
|
||||
"""visit a Arguments node by returning a fresh instance of it"""
|
||||
newnode = new.Arguments()
|
||||
newnode.parent = parent
|
||||
self.asscontext = "Ass"
|
||||
newnode.args = [self.visit(child, newnode) for child in node.args]
|
||||
self.asscontext = None
|
||||
newnode.defaults = [self.visit(child, newnode) for child in node.defaults]
|
||||
newnode.kwonlyargs = []
|
||||
newnode.kw_defaults = []
|
||||
vararg, kwarg = node.vararg, node.kwarg
|
||||
# change added in 82732 (7c5c678e4164), vararg and kwarg
|
||||
# are instances of `_ast.arg`, not strings
|
||||
if vararg:
|
||||
if PY34:
|
||||
if vararg.annotation:
|
||||
newnode.varargannotation = self.visit(vararg.annotation,
|
||||
newnode)
|
||||
vararg = vararg.arg
|
||||
elif PY3K and node.varargannotation:
|
||||
newnode.varargannotation = self.visit(node.varargannotation,
|
||||
newnode)
|
||||
if kwarg:
|
||||
if PY34:
|
||||
if kwarg.annotation:
|
||||
newnode.kwargannotation = self.visit(kwarg.annotation,
|
||||
newnode)
|
||||
kwarg = kwarg.arg
|
||||
elif PY3K:
|
||||
if node.kwargannotation:
|
||||
newnode.kwargannotation = self.visit(node.kwargannotation,
|
||||
newnode)
|
||||
newnode.vararg = vararg
|
||||
newnode.kwarg = kwarg
|
||||
# save argument names in locals:
|
||||
if vararg:
|
||||
newnode.parent.set_local(vararg, newnode)
|
||||
if kwarg:
|
||||
newnode.parent.set_local(kwarg, newnode)
|
||||
return newnode
|
||||
|
||||
def visit_assattr(self, node, parent):
|
||||
"""visit a AssAttr node by returning a fresh instance of it"""
|
||||
assc, self.asscontext = self.asscontext, None
|
||||
newnode = new.AssAttr()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.expr = self.visit(node.expr, newnode)
|
||||
self.asscontext = assc
|
||||
self._delayed_assattr.append(newnode)
|
||||
return newnode
|
||||
|
||||
def visit_assert(self, node, parent):
|
||||
"""visit a Assert node by returning a fresh instance of it"""
|
||||
newnode = new.Assert()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.test = self.visit(node.test, newnode)
|
||||
if node.msg is not None:
|
||||
newnode.fail = self.visit(node.msg, newnode)
|
||||
return newnode
|
||||
|
||||
def visit_assign(self, node, parent):
|
||||
"""visit a Assign node by returning a fresh instance of it"""
|
||||
newnode = new.Assign()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
self.asscontext = "Ass"
|
||||
newnode.targets = [self.visit(child, newnode) for child in node.targets]
|
||||
self.asscontext = None
|
||||
newnode.value = self.visit(node.value, newnode)
|
||||
# set some function or metaclass infos XXX explain ?
|
||||
klass = newnode.parent.frame()
|
||||
if (isinstance(klass, new.Class)
|
||||
and isinstance(newnode.value, new.CallFunc)
|
||||
and isinstance(newnode.value.func, new.Name)):
|
||||
func_name = newnode.value.func.name
|
||||
for ass_node in newnode.targets:
|
||||
try:
|
||||
meth = klass[ass_node.name]
|
||||
if isinstance(meth, new.Function):
|
||||
if func_name in ('classmethod', 'staticmethod'):
|
||||
meth.type = func_name
|
||||
elif func_name == 'classproperty': # see lgc.decorators
|
||||
meth.type = 'classmethod'
|
||||
meth.extra_decorators.append(newnode.value)
|
||||
except (AttributeError, KeyError):
|
||||
continue
|
||||
return newnode
|
||||
|
||||
def visit_assname(self, node, parent, node_name=None):
|
||||
'''visit a node and return a AssName node'''
|
||||
newnode = new.AssName()
|
||||
_set_infos(node, newnode, parent)
|
||||
newnode.name = node_name
|
||||
self._save_assignment(newnode)
|
||||
return newnode
|
||||
|
||||
def visit_augassign(self, node, parent):
|
||||
"""visit a AugAssign node by returning a fresh instance of it"""
|
||||
newnode = new.AugAssign()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.op = _BIN_OP_CLASSES[node.op.__class__] + "="
|
||||
self.asscontext = "Ass"
|
||||
newnode.target = self.visit(node.target, newnode)
|
||||
self.asscontext = None
|
||||
newnode.value = self.visit(node.value, newnode)
|
||||
return newnode
|
||||
|
||||
def visit_backquote(self, node, parent):
|
||||
"""visit a Backquote node by returning a fresh instance of it"""
|
||||
newnode = new.Backquote()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.value = self.visit(node.value, newnode)
|
||||
return newnode
|
||||
|
||||
def visit_binop(self, node, parent):
|
||||
"""visit a BinOp node by returning a fresh instance of it"""
|
||||
if isinstance(node.left, BinOp) and self._manager.optimize_ast:
|
||||
# Optimize BinOp operations in order to remove
|
||||
# redundant recursion. For instance, if the
|
||||
# following code is parsed in order to obtain
|
||||
# its ast, then the rebuilder will fail with an
|
||||
# infinite recursion, the same will happen with the
|
||||
# inference engine as well. There's no need to hold
|
||||
# so many objects for the BinOp if they can be reduced
|
||||
# to something else (also, the optimization
|
||||
# might handle only Const binops, which isn't a big
|
||||
# problem for the correctness of the program).
|
||||
#
|
||||
# ("a" + "b" + # one thousand more + "c")
|
||||
newnode = self._peepholer.optimize_binop(node)
|
||||
if newnode:
|
||||
_lineno_parent(node, newnode, parent)
|
||||
return newnode
|
||||
|
||||
newnode = new.BinOp()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.left = self.visit(node.left, newnode)
|
||||
newnode.right = self.visit(node.right, newnode)
|
||||
newnode.op = _BIN_OP_CLASSES[node.op.__class__]
|
||||
return newnode
|
||||
|
||||
def visit_boolop(self, node, parent):
|
||||
"""visit a BoolOp node by returning a fresh instance of it"""
|
||||
newnode = new.BoolOp()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.values = [self.visit(child, newnode) for child in node.values]
|
||||
newnode.op = _BOOL_OP_CLASSES[node.op.__class__]
|
||||
return newnode
|
||||
|
||||
def visit_break(self, node, parent):
|
||||
"""visit a Break node by returning a fresh instance of it"""
|
||||
newnode = new.Break()
|
||||
_set_infos(node, newnode, parent)
|
||||
return newnode
|
||||
|
||||
def visit_callfunc(self, node, parent):
|
||||
"""visit a CallFunc node by returning a fresh instance of it"""
|
||||
newnode = new.CallFunc()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.func = self.visit(node.func, newnode)
|
||||
newnode.args = [self.visit(child, newnode) for child in node.args]
|
||||
if node.starargs is not None:
|
||||
newnode.starargs = self.visit(node.starargs, newnode)
|
||||
if node.kwargs is not None:
|
||||
newnode.kwargs = self.visit(node.kwargs, newnode)
|
||||
for child in node.keywords:
|
||||
newnode.args.append(self.visit(child, newnode))
|
||||
return newnode
|
||||
|
||||
def visit_class(self, node, parent):
|
||||
"""visit a Class node to become astroid"""
|
||||
newnode = new.Class(node.name, None)
|
||||
_lineno_parent(node, newnode, parent)
|
||||
_init_set_doc(node, newnode)
|
||||
newnode.bases = [self.visit(child, newnode) for child in node.bases]
|
||||
newnode.body = [self.visit(child, newnode) for child in node.body]
|
||||
if 'decorator_list' in node._fields and node.decorator_list:# py >= 2.6
|
||||
newnode.decorators = self.visit_decorators(node, newnode)
|
||||
newnode.parent.frame().set_local(newnode.name, newnode)
|
||||
return newnode
|
||||
|
||||
def visit_const(self, node, parent):
|
||||
"""visit a Const node by returning a fresh instance of it"""
|
||||
newnode = new.Const(node.value)
|
||||
_set_infos(node, newnode, parent)
|
||||
return newnode
|
||||
|
||||
def visit_continue(self, node, parent):
|
||||
"""visit a Continue node by returning a fresh instance of it"""
|
||||
newnode = new.Continue()
|
||||
_set_infos(node, newnode, parent)
|
||||
return newnode
|
||||
|
||||
def visit_compare(self, node, parent):
|
||||
"""visit a Compare node by returning a fresh instance of it"""
|
||||
newnode = new.Compare()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.left = self.visit(node.left, newnode)
|
||||
newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode))
|
||||
for (op, expr) in zip(node.ops, node.comparators)]
|
||||
return newnode
|
||||
|
||||
def visit_comprehension(self, node, parent):
|
||||
"""visit a Comprehension node by returning a fresh instance of it"""
|
||||
newnode = new.Comprehension()
|
||||
newnode.parent = parent
|
||||
self.asscontext = "Ass"
|
||||
newnode.target = self.visit(node.target, newnode)
|
||||
self.asscontext = None
|
||||
newnode.iter = self.visit(node.iter, newnode)
|
||||
newnode.ifs = [self.visit(child, newnode) for child in node.ifs]
|
||||
return newnode
|
||||
|
||||
def visit_decorators(self, node, parent):
|
||||
"""visit a Decorators node by returning a fresh instance of it"""
|
||||
# /!\ node is actually a _ast.Function node while
|
||||
# parent is a astroid.nodes.Function node
|
||||
newnode = new.Decorators()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
if 'decorators' in node._fields: # py < 2.6, i.e. 2.5
|
||||
decorators = node.decorators
|
||||
else:
|
||||
decorators = node.decorator_list
|
||||
newnode.nodes = [self.visit(child, newnode) for child in decorators]
|
||||
return newnode
|
||||
|
||||
def visit_delete(self, node, parent):
|
||||
"""visit a Delete node by returning a fresh instance of it"""
|
||||
newnode = new.Delete()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
self.asscontext = "Del"
|
||||
newnode.targets = [self.visit(child, newnode) for child in node.targets]
|
||||
self.asscontext = None
|
||||
return newnode
|
||||
|
||||
def visit_dict(self, node, parent):
|
||||
"""visit a Dict node by returning a fresh instance of it"""
|
||||
newnode = new.Dict()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.items = [(self.visit(key, newnode), self.visit(value, newnode))
|
||||
for key, value in zip(node.keys, node.values)]
|
||||
return newnode
|
||||
|
||||
def visit_dictcomp(self, node, parent):
|
||||
"""visit a DictComp node by returning a fresh instance of it"""
|
||||
newnode = new.DictComp()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.key = self.visit(node.key, newnode)
|
||||
newnode.value = self.visit(node.value, newnode)
|
||||
newnode.generators = [self.visit(child, newnode)
|
||||
for child in node.generators]
|
||||
return newnode
|
||||
|
||||
def visit_discard(self, node, parent):
|
||||
"""visit a Discard node by returning a fresh instance of it"""
|
||||
newnode = new.Discard()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.value = self.visit(node.value, newnode)
|
||||
return newnode
|
||||
|
||||
def visit_ellipsis(self, node, parent):
|
||||
"""visit an Ellipsis node by returning a fresh instance of it"""
|
||||
newnode = new.Ellipsis()
|
||||
_set_infos(node, newnode, parent)
|
||||
return newnode
|
||||
|
||||
def visit_emptynode(self, node, parent):
|
||||
"""visit an EmptyNode node by returning a fresh instance of it"""
|
||||
newnode = new.EmptyNode()
|
||||
_set_infos(node, newnode, parent)
|
||||
return newnode
|
||||
|
||||
def visit_excepthandler(self, node, parent):
|
||||
"""visit an ExceptHandler node by returning a fresh instance of it"""
|
||||
newnode = new.ExceptHandler()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
if node.type is not None:
|
||||
newnode.type = self.visit(node.type, newnode)
|
||||
if node.name is not None:
|
||||
# /!\ node.name can be a tuple
|
||||
self.asscontext = "Ass"
|
||||
newnode.name = self.visit(node.name, newnode)
|
||||
self.asscontext = None
|
||||
newnode.body = [self.visit(child, newnode) for child in node.body]
|
||||
return newnode
|
||||
|
||||
def visit_exec(self, node, parent):
|
||||
"""visit an Exec node by returning a fresh instance of it"""
|
||||
newnode = new.Exec()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
newnode.expr = self.visit(node.body, newnode)
|
||||
if node.globals is not None:
|
||||
newnode.globals = self.visit(node.globals, newnode)
|
||||
if node.locals is not None:
|
||||
newnode.locals = self.visit(node.locals, newnode)
|
||||
return newnode
|
||||
|
||||
def visit_extslice(self, node, parent):
|
||||
"""visit an ExtSlice node by returning a fresh instance of it"""
|
||||
newnode = new.ExtSlice()
|
||||
newnode.parent = parent
|
||||
newnode.dims = [self.visit(dim, newnode) for dim in node.dims]
|
||||
return newnode
|
||||
|
||||
def visit_for(self, node, parent):
|
||||
"""visit a For node by returning a fresh instance of it"""
|
||||
newnode = new.For()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
self.asscontext = "Ass"
|
||||
newnode.target = self.visit(node.target, newnode)
|
||||
self.asscontext = None
|
||||
newnode.iter = self.visit(node.iter, newnode)
|
||||
newnode.body = [self.visit(child, newnode) for child in node.body]
|
||||
newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
|
||||
return newnode
|
||||
|
||||
def visit_from(self, node, parent):
|
||||
"""visit a From node by returning a fresh instance of it"""
|
||||
names = [(alias.name, alias.asname) for alias in node.names]
|
||||
newnode = new.From(node.module or '', names, node.level or None)
|
||||
_set_infos(node, newnode, parent)
|
||||
# store From names to add them to locals after building
|
||||
self._from_nodes.append(newnode)
|
||||
return newnode
|
||||
|
||||
def visit_function(self, node, parent):
    """visit a Function node to become astroid"""
    # push a fresh scope to collect `global` declarations made inside
    # this function (see visit_global)
    self._global_names.append({})
    newnode = new.Function(node.name, None)
    _lineno_parent(node, newnode, parent)
    _init_set_doc(node, newnode)
    newnode.args = self.visit(node.args, newnode)
    newnode.body = [self.visit(child, newnode) for child in node.body]
    if 'decorators' in node._fields: # py < 2.6
        attr = 'decorators'
    else:
        attr = 'decorator_list'
    decorators = getattr(node, attr)
    if decorators:
        newnode.decorators = self.visit_decorators(node, newnode)
    if PY3K and node.returns:
        # py3 return annotation
        newnode.returns = self.visit(node.returns, newnode)
    self._global_names.pop()
    frame = newnode.parent.frame()
    if isinstance(frame, new.Class):
        # the function is defined in a class body: tag its method kind
        if newnode.name == '__new__':
            newnode._type = 'classmethod'
        else:
            newnode._type = 'method'
        if newnode.decorators is not None:
            # a classmethod/staticmethod decorator overrides the default kind
            for decorator_expr in newnode.decorators.nodes:
                if isinstance(decorator_expr, new.Name):
                    if decorator_expr.name in ('classmethod', 'staticmethod'):
                        newnode._type = decorator_expr.name
                    elif decorator_expr.name == 'classproperty':
                        newnode._type = 'classmethod'
    # register the function name in the enclosing frame's locals
    frame.set_local(newnode.name, newnode)
    return newnode
|
||||
|
||||
def visit_genexpr(self, node, parent):
    """Rebuild a GenExpr: element expression plus comprehension clauses."""
    rebuilt = new.GenExpr()
    _lineno_parent(node, rebuilt, parent)
    rebuilt.elt = self.visit(node.elt, rebuilt)
    clauses = []
    for comprehension in node.generators:
        clauses.append(self.visit(comprehension, rebuilt))
    rebuilt.generators = clauses
    return rebuilt
|
||||
|
||||
def visit_getattr(self, node, parent):
    """visit a Getattr node by returning a fresh instance of it"""
    # the astroid class depends on the current assignment context:
    # `del obj.attr` -> DelAttr, `obj.attr = ...` -> AssAttr, else Getattr
    if self.asscontext == "Del":
        # FIXME : maybe we should reintroduce and visit_delattr ?
        # for instance, deactivating asscontext
        newnode = new.DelAttr()
    elif self.asscontext == "Ass":
        # FIXME : maybe we should call visit_assattr ?
        newnode = new.AssAttr()
        self._delayed_assattr.append(newnode)
    else:
        newnode = new.Getattr()
    _lineno_parent(node, newnode, parent)
    # the attribute's value expression is *read*, not assigned to:
    # suspend the context while visiting it, then restore it
    asscontext, self.asscontext = self.asscontext, None
    newnode.expr = self.visit(node.value, newnode)
    self.asscontext = asscontext
    newnode.attrname = node.attr
    return newnode
|
||||
|
||||
def visit_global(self, node, parent):
    """visit a Global node to become astroid"""
    newnode = new.Global(node.names)
    _set_infos(node, newnode, parent)
    if self._global_names: # global at the module level, no effect
        # record the declaration in the innermost function scope pushed
        # by visit_function
        for name in node.names:
            self._global_names[-1].setdefault(name, []).append(newnode)
    return newnode
|
||||
|
||||
def visit_if(self, node, parent):
    """Rebuild an If statement: test expression, body and else branch."""
    rebuilt = new.If()
    _lineno_parent(node, rebuilt, parent)
    rebuilt.test = self.visit(node.test, rebuilt)
    for field in ('body', 'orelse'):
        children = getattr(node, field)
        setattr(rebuilt, field,
                [self.visit(child, rebuilt) for child in children])
    return rebuilt
|
||||
|
||||
def visit_ifexp(self, node, parent):
    """Rebuild a conditional expression (``a if test else b``)."""
    rebuilt = new.IfExp()
    _lineno_parent(node, rebuilt, parent)
    # test, body and orelse are each single expressions
    for field in ('test', 'body', 'orelse'):
        setattr(rebuilt, field, self.visit(getattr(node, field), rebuilt))
    return rebuilt
|
||||
|
||||
def visit_import(self, node, parent):
    """visit an Import node by returning a fresh instance of it"""
    newnode = new.Import()
    _set_infos(node, newnode, parent)
    newnode.names = [(alias.name, alias.asname) for alias in node.names]
    # save import names in parent's locals:
    for (name, asname) in newnode.names:
        name = asname or name
        # only the first dotted component is bound in the namespace
        newnode.parent.set_local(name.split('.')[0], newnode)
    return newnode
|
||||
|
||||
def visit_index(self, node, parent):
    """Rebuild an Index node (the value part of a subscript)."""
    rebuilt = new.Index()
    rebuilt.parent = parent
    rebuilt.value = self.visit(node.value, rebuilt)
    return rebuilt
|
||||
|
||||
def visit_keyword(self, node, parent):
    """Rebuild a Keyword (``name=value`` argument) node."""
    rebuilt = new.Keyword()
    rebuilt.parent = parent
    rebuilt.arg = node.arg
    rebuilt.value = self.visit(node.value, rebuilt)
    return rebuilt
|
||||
|
||||
def visit_lambda(self, node, parent):
    """Rebuild a Lambda: its argument list, then its body expression."""
    rebuilt = new.Lambda()
    _lineno_parent(node, rebuilt, parent)
    rebuilt.args = self.visit(node.args, rebuilt)
    rebuilt.body = self.visit(node.body, rebuilt)
    return rebuilt
|
||||
|
||||
def visit_list(self, node, parent):
    """Rebuild a List literal, visiting every element."""
    rebuilt = new.List()
    _lineno_parent(node, rebuilt, parent)
    elements = []
    for element in node.elts:
        elements.append(self.visit(element, rebuilt))
    rebuilt.elts = elements
    return rebuilt
|
||||
|
||||
def visit_listcomp(self, node, parent):
    """Rebuild a ListComp: element expression plus comprehension clauses."""
    rebuilt = new.ListComp()
    _lineno_parent(node, rebuilt, parent)
    rebuilt.elt = self.visit(node.elt, rebuilt)
    clauses = []
    for comprehension in node.generators:
        clauses.append(self.visit(comprehension, rebuilt))
    rebuilt.generators = clauses
    return rebuilt
|
||||
|
||||
def visit_name(self, node, parent):
    """visit a Name node by returning a fresh instance of it"""
    # True and False can be assigned to something in py2x, so we have to
    # check first the asscontext
    if self.asscontext == "Del":
        newnode = new.DelName()
    elif self.asscontext is not None: # Ass
        assert self.asscontext == "Ass"
        newnode = new.AssName()
    elif node.id in CONST_NAME_TRANSFORMS:
        # names like True/False/None are rebuilt as constants
        newnode = new.Const(CONST_NAME_TRANSFORMS[node.id])
        _set_infos(node, newnode, parent)
        return newnode
    else:
        newnode = new.Name()
    _lineno_parent(node, newnode, parent)
    newnode.name = node.id
    # XXX REMOVE me :
    if self.asscontext in ('Del', 'Ass'): # 'Aug' ??
        self._save_assignment(newnode)
    return newnode
|
||||
|
||||
def visit_bytes(self, node, parent):
    """Rebuild a Bytes literal as an astroid Const node."""
    const = new.Const(node.s)
    _set_infos(node, const, parent)
    return const
|
||||
|
||||
def visit_num(self, node, parent):
    """Rebuild a Num literal as an astroid Const node."""
    const = new.Const(node.n)
    _set_infos(node, const, parent)
    return const
|
||||
|
||||
def visit_pass(self, node, parent):
    """Rebuild a Pass statement."""
    rebuilt = new.Pass()
    _set_infos(node, rebuilt, parent)
    return rebuilt
|
||||
|
||||
def visit_str(self, node, parent):
    """Rebuild a Str literal as an astroid Const node."""
    const = new.Const(node.s)
    _set_infos(node, const, parent)
    return const
|
||||
|
||||
def visit_print(self, node, parent):
    """visit a Print node by returning a fresh instance of it"""
    newnode = new.Print()
    _lineno_parent(node, newnode, parent)
    # propagate the newline flag from the ast node
    newnode.nl = node.nl
    # dest is the optional ">>stream" destination
    if node.dest is not None:
        newnode.dest = self.visit(node.dest, newnode)
    newnode.values = [self.visit(child, newnode) for child in node.values]
    return newnode
|
||||
|
||||
def visit_raise(self, node, parent):
    """Rebuild a (python 2) Raise: optional type, instance, traceback."""
    rebuilt = new.Raise()
    _lineno_parent(node, rebuilt, parent)
    # every part is optional: "raise", "raise E", "raise E, inst, tb";
    # the ast 'type' field maps to astroid's 'exc'
    for src_attr, dest_attr in (('type', 'exc'),
                                ('inst', 'inst'),
                                ('tback', 'tback')):
        child = getattr(node, src_attr)
        if child is not None:
            setattr(rebuilt, dest_attr, self.visit(child, rebuilt))
    return rebuilt
|
||||
|
||||
def visit_return(self, node, parent):
    """Rebuild a Return statement; the returned value is optional."""
    rebuilt = new.Return()
    _lineno_parent(node, rebuilt, parent)
    if node.value is not None:
        rebuilt.value = self.visit(node.value, rebuilt)
    return rebuilt
|
||||
|
||||
def visit_set(self, node, parent):
    """Rebuild a Set literal, visiting every element."""
    rebuilt = new.Set()
    _lineno_parent(node, rebuilt, parent)
    elements = []
    for element in node.elts:
        elements.append(self.visit(element, rebuilt))
    rebuilt.elts = elements
    return rebuilt
|
||||
|
||||
def visit_setcomp(self, node, parent):
    """Rebuild a SetComp: element expression plus comprehension clauses."""
    rebuilt = new.SetComp()
    _lineno_parent(node, rebuilt, parent)
    rebuilt.elt = self.visit(node.elt, rebuilt)
    clauses = []
    for comprehension in node.generators:
        clauses.append(self.visit(comprehension, rebuilt))
    rebuilt.generators = clauses
    return rebuilt
|
||||
|
||||
def visit_slice(self, node, parent):
    """Rebuild a Slice node; lower, upper and step are each optional."""
    rebuilt = new.Slice()
    rebuilt.parent = parent
    for bound in ('lower', 'upper', 'step'):
        child = getattr(node, bound)
        if child is not None:
            setattr(rebuilt, bound, self.visit(child, rebuilt))
    return rebuilt
|
||||
|
||||
def visit_subscript(self, node, parent):
    """visit a Subscript node by returning a fresh instance of it"""
    newnode = new.Subscript()
    _lineno_parent(node, newnode, parent)
    # the subscripted value and the slice are *read* even when the
    # subscript as a whole is an assignment target, so suspend the
    # assignment context while visiting them, then restore it
    subcontext, self.asscontext = self.asscontext, None
    newnode.value = self.visit(node.value, newnode)
    newnode.slice = self.visit(node.slice, newnode)
    self.asscontext = subcontext
    return newnode
|
||||
|
||||
def visit_tryexcept(self, node, parent):
    """Rebuild a TryExcept: protected body, handlers, else clause."""
    rebuilt = new.TryExcept()
    _lineno_parent(node, rebuilt, parent)
    for field in ('body', 'handlers', 'orelse'):
        rebuilt_children = [self.visit(child, rebuilt)
                            for child in getattr(node, field)]
        setattr(rebuilt, field, rebuilt_children)
    return rebuilt
|
||||
|
||||
def visit_tryfinally(self, node, parent):
    """Rebuild a TryFinally: protected body plus finally clause."""
    rebuilt = new.TryFinally()
    _lineno_parent(node, rebuilt, parent)
    rebuilt.body = [self.visit(child, rebuilt) for child in node.body]
    rebuilt.finalbody = [self.visit(child, rebuilt)
                         for child in node.finalbody]
    return rebuilt
|
||||
|
||||
def visit_tuple(self, node, parent):
    """Rebuild a Tuple literal, visiting every element."""
    rebuilt = new.Tuple()
    _lineno_parent(node, rebuilt, parent)
    elements = []
    for element in node.elts:
        elements.append(self.visit(element, rebuilt))
    rebuilt.elts = elements
    return rebuilt
|
||||
|
||||
def visit_unaryop(self, node, parent):
    """Rebuild a UnaryOp; the ast operator class is translated through
    the _UNARY_OP_CLASSES table."""
    rebuilt = new.UnaryOp()
    _lineno_parent(node, rebuilt, parent)
    rebuilt.operand = self.visit(node.operand, rebuilt)
    rebuilt.op = _UNARY_OP_CLASSES[node.op.__class__]
    return rebuilt
|
||||
|
||||
def visit_while(self, node, parent):
    """Rebuild a While loop: test expression, body and else clause."""
    rebuilt = new.While()
    _lineno_parent(node, rebuilt, parent)
    rebuilt.test = self.visit(node.test, rebuilt)
    for field in ('body', 'orelse'):
        children = getattr(node, field)
        setattr(rebuilt, field,
                [self.visit(child, rebuilt) for child in children])
    return rebuilt
|
||||
|
||||
def visit_with(self, node, parent):
    """visit a (python < 3.3) With node: a single context expression
    with an optional "as" target"""
    newnode = new.With()
    _lineno_parent(node, newnode, parent)
    expr = self.visit(node.context_expr, newnode)
    # the "as" target is an assignment: switch the context while
    # visiting it (see visit_name / visit_getattr)
    self.asscontext = "Ass"
    if node.optional_vars is not None:
        vars = self.visit(node.optional_vars, newnode)
    else:
        vars = None
    self.asscontext = None
    # normalized to the py3.3+ shape: a list of (expr, vars) pairs
    newnode.items = [(expr, vars)]
    newnode.body = [self.visit(child, newnode) for child in node.body]
    return newnode
|
||||
|
||||
def visit_yield(self, node, parent):
    """visit a Yield node by returning a fresh instance of it"""
    # delegates to the shared helper also used for YieldFrom in the
    # py3k rebuilder
    return _create_yield_node(node, parent, self, new.Yield)
|
||||
|
||||
class TreeRebuilder3k(TreeRebuilder):
    """extend and overwrite TreeRebuilder for python3k"""

    def visit_arg(self, node, parent):
        """visit a arg node by returning a fresh AssName instance"""
        # the <arg> node is coming from py>=3.0, but we use AssName in py2.x
        # XXX or we should instead introduce a Arg node in astroid ?
        return self.visit_assname(node, parent, node.arg)

    def visit_nameconstant(self, node, parent):
        """visit a NameConstant node (py3.4+) by returning a Const"""
        # in Python 3.4 we have NameConstant for True / False / None
        newnode = new.Const(node.value)
        _set_infos(node, newnode, parent)
        return newnode

    def visit_arguments(self, node, parent):
        """visit an Arguments node, also rebuilding the py3-only fields
        (keyword-only args, their defaults, and annotations)"""
        newnode = super(TreeRebuilder3k, self).visit_arguments(node, parent)
        # keyword-only arguments are assignment targets
        self.asscontext = "Ass"
        newnode.kwonlyargs = [self.visit(child, newnode) for child in node.kwonlyargs]
        self.asscontext = None
        # a kw_defaults entry is None for a kw-only arg without default
        newnode.kw_defaults = [self.visit(child, newnode) if child else None for child in node.kw_defaults]
        newnode.annotations = [
            self.visit(arg.annotation, newnode) if arg.annotation else None
            for arg in node.args]
        return newnode

    def visit_excepthandler(self, node, parent):
        """visit an ExceptHandler node by returning a fresh instance of it"""
        newnode = new.ExceptHandler()
        _lineno_parent(node, newnode, parent)
        if node.type is not None:
            newnode.type = self.visit(node.type, newnode)
        if node.name is not None:
            # py3 stores the bound name as a plain string, not a Name node
            newnode.name = self.visit_assname(node, newnode, node.name)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        return newnode

    def visit_nonlocal(self, node, parent):
        """visit a Nonlocal node and return a new instance of it"""
        newnode = new.Nonlocal(node.names)
        _set_infos(node, newnode, parent)
        return newnode

    def visit_raise(self, node, parent):
        """visit a Raise node by returning a fresh instance of it"""
        newnode = new.Raise()
        _lineno_parent(node, newnode, parent)
        # no traceback; anyway it is not used in Pylint
        if node.exc is not None:
            newnode.exc = self.visit(node.exc, newnode)
        if node.cause is not None:
            # "raise X from Y"
            newnode.cause = self.visit(node.cause, newnode)
        return newnode

    def visit_starred(self, node, parent):
        """visit a Starred node and return a new instance of it"""
        newnode = new.Starred()
        _lineno_parent(node, newnode, parent)
        newnode.value = self.visit(node.value, newnode)
        return newnode

    def visit_try(self, node, parent):
        """visit a Try node, mapping it onto the legacy TryFinally /
        TryExcept astroid nodes"""
        # python 3.3 introduce a new Try node replacing TryFinally/TryExcept nodes
        # NOTE(review): a Try with neither handlers nor a finally part would
        # leave newnode unbound; presumably the python grammar guarantees at
        # least one of them -- confirm
        if node.finalbody:
            newnode = new.TryFinally()
            _lineno_parent(node, newnode, parent)
            newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody]
            if node.handlers:
                # the except part becomes a nested TryExcept node
                excnode = new.TryExcept()
                _lineno_parent(node, excnode, newnode)
                excnode.body = [self.visit(child, excnode) for child in node.body]
                excnode.handlers = [self.visit(child, excnode) for child in node.handlers]
                excnode.orelse = [self.visit(child, excnode) for child in node.orelse]
                newnode.body = [excnode]
            else:
                newnode.body = [self.visit(child, newnode) for child in node.body]
        elif node.handlers:
            newnode = new.TryExcept()
            _lineno_parent(node, newnode, parent)
            newnode.body = [self.visit(child, newnode) for child in node.body]
            newnode.handlers = [self.visit(child, newnode) for child in node.handlers]
            newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
        return newnode

    def visit_with(self, node, parent):
        """visit a With node; on py3.3+ each item is an (expr, vars) pair"""
        if 'items' not in node._fields:
            # python < 3.3
            return super(TreeRebuilder3k, self).visit_with(node, parent)

        newnode = new.With()
        _lineno_parent(node, newnode, parent)
        def visit_child(child):
            # rebuild one "expr as vars" item; vars is an assignment target
            expr = self.visit(child.context_expr, newnode)
            self.asscontext = 'Ass'
            if child.optional_vars:
                var = self.visit(child.optional_vars, newnode)
            else:
                var = None
            self.asscontext = None
            return expr, var
        newnode.items = [visit_child(child)
                         for child in node.items]
        newnode.body = [self.visit(child, newnode) for child in node.body]
        return newnode

    def visit_yieldfrom(self, node, parent):
        """visit a YieldFrom node via the shared yield-node helper"""
        return _create_yield_node(node, parent, self, new.YieldFrom)

    def visit_class(self, node, parent):
        """visit a ClassDef node; py3 classes are always new-style and may
        declare a metaclass keyword"""
        newnode = super(TreeRebuilder3k, self).visit_class(node, parent)
        newnode._newstyle = True
        for keyword in node.keywords:
            if keyword.arg == 'metaclass':
                newnode._metaclass = self.visit(keyword, newnode).value
                break
        return newnode
|
||||
|
||||
if sys.version_info >= (3, 0):
    # on python 3 interpreters, transparently replace the base rebuilder
    # with the py3k-aware subclass
    TreeRebuilder = TreeRebuilder3k
|
||||
|
||||
|
||||
1484
plugins/bundle/python-mode/pymode/libs/astroid/scoped_nodes.py
Normal file
1484
plugins/bundle/python-mode/pymode/libs/astroid/scoped_nodes.py
Normal file
File diff suppressed because it is too large
Load diff
218
plugins/bundle/python-mode/pymode/libs/astroid/test_utils.py
Normal file
218
plugins/bundle/python-mode/pymode/libs/astroid/test_utils.py
Normal file
|
|
@ -0,0 +1,218 @@
|
|||
"""Utility functions for test code that uses astroid ASTs as input."""
|
||||
import functools
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
from astroid import nodes
|
||||
from astroid import builder
|
||||
# The name of the transient function that is used to
|
||||
# wrap expressions to be extracted when calling
|
||||
# extract_node.
|
||||
_TRANSIENT_FUNCTION = '__'
|
||||
|
||||
# The comment used to select a statement to be extracted
|
||||
# when calling extract_node.
|
||||
_STATEMENT_SELECTOR = '#@'
|
||||
|
||||
|
||||
def _extract_expressions(node):
    """Find expressions in a call to _TRANSIENT_FUNCTION and extract them.

    The function walks the AST recursively to search for expressions that
    are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
    expression, it completely removes the function call node from the tree,
    replacing it by the wrapped expression inside the parent.

    :param node: An astroid node.
    :type node: astroid.bases.NodeNG
    :yields: The sequence of wrapped expressions on the modified tree
    expression can be found.
    """
    if (isinstance(node, nodes.CallFunc)
        and isinstance(node.func, nodes.Name)
        and node.func.name == _TRANSIENT_FUNCTION):
        real_expr = node.args[0]
        real_expr.parent = node.parent
        # Search for node in all _astng_fields (the fields checked when
        # get_children is called) of its parent. Some of those fields may
        # be lists or tuples, in which case the elements need to be checked.
        # When we find it, replace it by real_expr, so that the AST looks
        # like no call to _TRANSIENT_FUNCTION ever took place.
        for name in node.parent._astroid_fields:
            child = getattr(node.parent, name)
            if isinstance(child, (list, tuple)):
                for idx, compound_child in enumerate(child):
                    if compound_child is node:
                        child[idx] = real_expr
            elif child is node:
                setattr(node.parent, name, real_expr)
        yield real_expr
    else:
        # not a wrapper call: recurse into the children
        for child in node.get_children():
            for result in _extract_expressions(child):
                yield result
|
||||
|
||||
|
||||
def _find_statement_by_line(node, line):
    """Extracts the statement on a specific line from an AST.

    If the line number of *node* matches *line*, it is returned;
    otherwise its children are searched recursively.

    :param node: An astroid node.
    :type node: astroid.bases.NodeNG
    :param line: The line number of the statement to extract.
    :type line: int
    :returns: The statement on the line, or None if no statement for the
        line can be found.
    :rtype: astroid.bases.NodeNG or None
    """
    if isinstance(node, (nodes.Class, nodes.Function)):
        # This is an inaccuracy in the AST: decoratable nodes do not carry
        # explicit information on which line the actual definition
        # (class/def) sits, but .fromlineno seems to be close enough.
        candidate_line = node.fromlineno
    else:
        candidate_line = node.lineno

    if candidate_line == line:
        return node

    for child in node.get_children():
        found = _find_statement_by_line(child, line)
        if found:
            return found

    return None
|
||||
|
||||
def extract_node(code, module_name=''):
    """Parses some Python code as a module and extracts a designated AST node.

    Statements:
     To extract one or more statement nodes, append #@ to the end of the line

     Examples:
       >>> def x():
       >>>   def y():
       >>>     return 1 #@

       The return statement will be extracted.

       >>> class X(object):
       >>>   def meth(self): #@
       >>>     pass

       The function object 'meth' will be extracted.

    Expressions:
     To extract arbitrary expressions, surround them with the fake
     function call __(...). After parsing, the surrounded expression
     will be returned and the whole AST (accessible via the returned
     node's parent attribute) will look like the function call was
     never there in the first place.

     Examples:
       >>> a = __(1)

       The const node will be extracted.

       >>> def x(d=__(foo.bar)): pass

       The node containing the default argument will be extracted.

       >>> def foo(a, b):
       >>>   return 0 < __(len(a)) < b

       The node containing the function call 'len' will be extracted.

    If no statements or expressions are selected, the last toplevel
    statement will be returned.

    If the selected statement is a discard statement, (i.e. an expression
    turned into a statement), the wrapped expression is returned instead.

    For convenience, singleton lists are unpacked.

    :param str code: A piece of Python code that is parsed as
        a module. Will be passed through textwrap.dedent first.
    :param str module_name: The name of the module.
    :returns: The designated node from the parse tree, or a list of nodes.
    :rtype: astroid.bases.NodeNG, or a list of nodes.
    """
    def _extract(node):
        # unwrap a Discard (expression statement) to its expression
        if isinstance(node, nodes.Discard):
            return node.value
        else:
            return node

    # collect the 1-based line numbers marked with the #@ selector
    requested_lines = []
    for idx, line in enumerate(code.splitlines()):
        if line.strip().endswith(_STATEMENT_SELECTOR):
            requested_lines.append(idx + 1)

    tree = build_module(code, module_name=module_name)
    extracted = []
    if requested_lines:
        for line in requested_lines:
            extracted.append(_find_statement_by_line(tree, line))

    # Modifies the tree.
    extracted.extend(_extract_expressions(tree))

    if not extracted:
        # nothing selected: default to the last toplevel statement
        extracted.append(tree.body[-1])

    extracted = [_extract(node) for node in extracted]
    if len(extracted) == 1:
        return extracted[0]
    else:
        return extracted
|
||||
|
||||
|
||||
def build_module(code, module_name='', path=None):
    """Parses a string module with a builder.

    :param code: The code for the module.
    :type code: str
    :param module_name: The name for the module
    :type module_name: str
    :param path: The path for the module
    :type path: str
    :returns: The module AST.
    :rtype: astroid.bases.NodeNG
    """
    # dedent so indented triple-quoted test snippets parse cleanly
    code = textwrap.dedent(code)
    return builder.AstroidBuilder(None).string_build(code, modname=module_name, path=path)
|
||||
|
||||
|
||||
def require_version(minver=None, maxver=None):
    """Compare the running interpreter's version to the given bounds and
    skip the decorated test when it falls outside of them.

    :param minver: minimal 'X.Y[.Z]' version string (exclusive lower bound)
    :param maxver: maximal 'X.Y[.Z]' version string (inclusive upper bound)
    :returns: a decorator that returns the test untouched when the current
        interpreter is in range, or a wrapper calling ``self.skipTest``
        with an explanatory message otherwise
    :raises ValueError: when a bound is not a dotted sequence of integers
    """
    def parse(string, default=None):
        # turn 'X.Y.Z' into an int tuple for comparison
        string = string or default
        try:
            return tuple(int(v) for v in string.split('.'))
        except ValueError:
            # BUG FIX: the original referenced the undefined name 'version'
            # here, so a malformed bound raised NameError instead of the
            # intended ValueError
            raise ValueError('%s is not a correct version : should be X.Y[.Z].' % string)

    def check_require_version(f):
        current = sys.version_info[:3]
        if parse(minver, "0") < current <= parse(maxver, "4"):
            # interpreter in range: run the test unchanged
            return f
        else:
            str_version = '.'.join(str(v) for v in sys.version_info)
            @functools.wraps(f)
            def new_f(self, *args, **kwargs):
                if minver is not None:
                    self.skipTest('Needs Python > %s. Current version is %s.' % (minver, str_version))
                elif maxver is not None:
                    self.skipTest('Needs Python <= %s. Current version is %s.' % (maxver, str_version))
            return new_f

    return check_require_version
|
||||
|
||||
def get_name_node(start_from, name, index=0):
    """Return the index-th Name node called *name* found under *start_from*."""
    matches = [candidate
               for candidate in start_from.nodes_of_class(nodes.Name)
               if candidate.name == name]
    return matches[index]
|
||||
239
plugins/bundle/python-mode/pymode/libs/astroid/utils.py
Normal file
239
plugins/bundle/python-mode/pymode/libs/astroid/utils.py
Normal file
|
|
@ -0,0 +1,239 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains some utilities to navigate in the tree or to
|
||||
extract information from it
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
from astroid.exceptions import AstroidBuildingException
|
||||
from astroid.builder import parse
|
||||
|
||||
|
||||
class ASTWalker(object):
    """Preorder tree walker dispatching to a handler object.

    On entering a node it calls ``handler.visit_<classname>`` and on
    leaving it ``handler.leave_<classname>`` (class name lower-cased),
    falling back to ``visit_default`` / ``leave_default`` when the
    specific method is missing.
    """

    def __init__(self, handler):
        self.handler = handler
        # per-class cache of resolved (enter, leave) callback pairs
        self._cache = {}

    def walk(self, node, _done=None):
        """walk the tree from <node>, firing handler callbacks"""
        if _done is None:
            _done = set()
        # reaching the same node twice means the tree contains a cycle
        if node in _done:
            raise AssertionError((id(node), node, node.parent))
        _done.add(node)
        self.visit(node)
        for child in node.get_children():
            self.handler.set_context(node, child)
            assert child is not node
            self.walk(child, _done)
        self.leave(node)
        assert node.parent is not node

    def get_callbacks(self, node):
        """return the (enter, leave) handler callbacks for <node>'s class"""
        klass = node.__class__
        try:
            return self._cache[klass]
        except KeyError:
            handler = self.handler
            kid = klass.__name__.lower()
            enter = getattr(handler, 'visit_%s' % kid,
                            getattr(handler, 'visit_default', None))
            leave = getattr(handler, 'leave_%s' % kid,
                            getattr(handler, 'leave_default', None))
            self._cache[klass] = (enter, leave)
            return enter, leave

    def visit(self, node):
        """fire the enter callback for <node>, if any"""
        enter = self.get_callbacks(node)[0]
        if enter is not None:
            enter(node)

    def leave(self, node):
        """fire the leave callback for <node>, if any"""
        leave = self.get_callbacks(node)[1]
        if leave is not None:
            leave(node)
|
||||
|
||||
|
||||
class LocalsVisitor(ASTWalker):
    """visit a project by traversing the locals dictionary"""
    def __init__(self):
        # the visitor is its own handler
        ASTWalker.__init__(self, self)
        # nodes already visited, used as a set
        self._visited = {}

    def visit(self, node):
        """launch the visit starting from the given node"""
        if node in self._visited:
            return
        self._visited[node] = 1 # FIXME: use set ?
        methods = self.get_callbacks(node)
        if methods[0] is not None:
            methods[0](node)
        if 'locals' in node.__dict__: # skip Instance and other proxy
            # recurse through the values of the node's locals mapping
            for local_node in node.values():
                self.visit(local_node)
        if methods[1] is not None:
            return methods[1](node)
|
||||
|
||||
|
||||
def _check_children(node):
|
||||
"""a helper function to check children - parent relations"""
|
||||
for child in node.get_children():
|
||||
ok = False
|
||||
if child is None:
|
||||
print("Hm, child of %s is None" % node)
|
||||
continue
|
||||
if not hasattr(child, 'parent'):
|
||||
print(" ERROR: %s has child %s %x with no parent" % (
|
||||
node, child, id(child)))
|
||||
elif not child.parent:
|
||||
print(" ERROR: %s has child %s %x with parent %r" % (
|
||||
node, child, id(child), child.parent))
|
||||
elif child.parent is not node:
|
||||
print(" ERROR: %s %x has child %s %x with wrong parent %s" % (
|
||||
node, id(node), child, id(child), child.parent))
|
||||
else:
|
||||
ok = True
|
||||
if not ok:
|
||||
print("lines;", node.lineno, child.lineno)
|
||||
print("of module", node.root(), node.root().name)
|
||||
raise AstroidBuildingException
|
||||
_check_children(child)
|
||||
|
||||
|
||||
class TreeTester(object):
    '''A helper class to see the raw _ast tree and compare it with the
    astroid tree built from the same source.

    Class attributes:

    indent: string used for one level of tree indentation
    lineno: bool telling whether line/column attributes should be printed
    '''

    indent = '. '
    lineno = False

    def __init__(self, sourcecode):
        self._string = ''
        self.sourcecode = sourcecode
        self._ast_node = None
        self.build_ast()

    def build_ast(self):
        """build the _ast tree from the source code"""
        self._ast_node = parse(self.sourcecode)

    def native_tree_repr(self, node=None, indent=''):
        """get a nice representation of the _ast tree"""
        self._string = ''
        self._native_repr_tree(self._ast_node if node is None else node,
                               indent)
        return self._string

    def _native_repr_tree(self, node, indent, _done=None):
        """recursive method for the native tree representation"""
        from _ast import Load as _Load, Store as _Store, Del as _Del
        from _ast import AST as Node
        if _done is None:
            _done = set()
        if node in _done:
            # cycle guard: report the repeated node instead of recursing
            self._string += '\nloop in tree: %r (%s)' % (
                node, getattr(node, 'lineno', None))
            return
        _done.add(node)
        self._string += '\n%s<%s>' % (indent, node.__class__.__name__)
        indent += self.indent
        if not hasattr(node, '__dict__'):
            self._string += '\n%s ** node has no __dict__ %s' % (
                self.indent, node)
            return
        node_dict = node.__dict__
        # positional attributes (lineno, col_offset, ...) first
        for name in getattr(node, '_attributes', ()):
            value = node_dict[name]
            if value is None:
                continue
            if not self.lineno and name in ("lineno", "col_offset"):
                continue
            self._string += '\n%s%s = %s' % (indent, name, repr(value))
        # then the structural fields, recursing into child nodes
        for field in node._fields or ():
            value = node_dict[field]
            if value is None:
                continue
            if isinstance(value, list):
                if value:
                    self._string += '\n%s%s = [' % (indent, field)
                    for item in value:
                        self._native_repr_tree(item, indent, _done)
                    self._string += '\n%s]' % indent
                continue
            if isinstance(value, (_Load, _Store, _Del)):
                # expression contexts carry no information worth printing
                continue
            if isinstance(value, Node):
                self._string += '\n%s%s = ' % (indent, field)
                self._native_repr_tree(value, indent, _done)
            else:
                self._string += '\n%s%s = %s' % (indent, field, repr(value))

    def build_astroid_tree(self):
        """build astroid tree from the _ast tree"""
        from astroid.builder import AstroidBuilder
        return AstroidBuilder().string_build(self.sourcecode)

    def astroid_tree_repr(self, ids=False):
        """build the astroid tree and return a nice tree representation"""
        return self.build_astroid_tree().repr_tree(ids)
|
||||
|
||||
|
||||
# Explicit public API of this module (LocalsVisitor and ASTWalker are
# defined earlier in this file, outside the visible excerpt).
__all__ = ('LocalsVisitor', 'ASTWalker',)
|
||||
|
||||
Loading…
Add table
Add a link
Reference in a new issue