Adding new stuff
This commit is contained in:
parent
131691e143
commit
6c0a9f5b29
718 changed files with 0 additions and 0 deletions
|
|
@ -1,37 +0,0 @@
|
|||
""" Pymode support functions. """
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
import vim # noqa
|
||||
|
||||
|
||||
def auto():
|
||||
""" Fix PEP8 erorrs in current buffer. """
|
||||
from .autopep8 import fix_file
|
||||
|
||||
class Options(object):
|
||||
aggressive = 2
|
||||
diff = False
|
||||
experimental = True
|
||||
ignore = vim.eval('g:pymode_lint_ignore')
|
||||
in_place = True
|
||||
indent_size = int(vim.eval('&tabstop'))
|
||||
line_range = None
|
||||
max_line_length = int(vim.eval('g:pymode_options_max_line_length'))
|
||||
pep8_passes = 100
|
||||
recursive = False
|
||||
select = vim.eval('g:pymode_lint_select')
|
||||
verbose = 0
|
||||
|
||||
fix_file(vim.current.buffer.name, Options)
|
||||
|
||||
|
||||
def get_documentation():
|
||||
""" Search documentation and append to current buffer. """
|
||||
from ._compat import StringIO
|
||||
|
||||
sys.stdout, _ = StringIO(), sys.stdout
|
||||
help(vim.eval('a:word'))
|
||||
sys.stdout, out = _, sys.stdout.getvalue()
|
||||
vim.current.buffer.append(str(out).splitlines(), 0)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -1,98 +0,0 @@
|
|||
""" Compatibility.
|
||||
|
||||
Some py2/py3 compatibility support based on a stripped down
|
||||
version of six so we don't have to depend on a specific version
|
||||
of it.
|
||||
|
||||
:copyright: (c) 2014 by Armin Ronacher.
|
||||
:license: BSD
|
||||
"""
|
||||
import sys
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
_identity = lambda x: x
|
||||
|
||||
|
||||
if not PY2:
|
||||
text_type = str
|
||||
string_types = (str,)
|
||||
integer_types = (int, )
|
||||
|
||||
iterkeys = lambda d: iter(d.keys())
|
||||
itervalues = lambda d: iter(d.values())
|
||||
iteritems = lambda d: iter(d.items())
|
||||
|
||||
from io import StringIO
|
||||
from queue import Queue # noqa
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
|
||||
implements_to_string = _identity
|
||||
|
||||
else:
|
||||
text_type = unicode
|
||||
string_types = (str, unicode)
|
||||
integer_types = (int, long)
|
||||
|
||||
iterkeys = lambda d: d.iterkeys()
|
||||
itervalues = lambda d: d.itervalues()
|
||||
iteritems = lambda d: d.iteritems()
|
||||
|
||||
from cStringIO import StringIO
|
||||
from Queue import Queue
|
||||
|
||||
exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
|
||||
|
||||
def implements_to_string(cls):
|
||||
cls.__unicode__ = cls.__str__
|
||||
cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
|
||||
return cls
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
# This requires a bit of explanation: the basic idea is to make a
|
||||
# dummy metaclass for one level of class instantiation that replaces
|
||||
# itself with the actual metaclass. Because of internal type checks
|
||||
# we also need to make sure that we downgrade the custom metaclass
|
||||
# for one level to something closer to type (that's why __call__ and
|
||||
# __init__ comes back from type etc.).
|
||||
#
|
||||
# This has the advantage over six.with_metaclass in that it does not
|
||||
# introduce dummy classes into the final MRO.
|
||||
class metaclass(meta):
|
||||
__call__ = type.__call__
|
||||
__init__ = type.__init__
|
||||
def __new__(cls, name, this_bases, d):
|
||||
if this_bases is None:
|
||||
return type.__new__(cls, name, (), d)
|
||||
return meta(name, bases, d)
|
||||
return metaclass('temporary_class', None, {})
|
||||
|
||||
|
||||
# Certain versions of pypy have a bug where clearing the exception stack
|
||||
# breaks the __exit__ function in a very peculiar way. This is currently
|
||||
# true for pypy 2.2.1 for instance. The second level of exception blocks
|
||||
# is necessary because pypy seems to forget to check if an exception
|
||||
# happend until the next bytecode instruction?
|
||||
BROKEN_PYPY_CTXMGR_EXIT = False
|
||||
if hasattr(sys, 'pypy_version_info'):
|
||||
class _Mgr(object):
|
||||
def __enter__(self):
|
||||
return self
|
||||
def __exit__(self, *args):
|
||||
sys.exc_clear()
|
||||
try:
|
||||
try:
|
||||
with _Mgr():
|
||||
raise AssertionError()
|
||||
except:
|
||||
raise
|
||||
except TypeError:
|
||||
BROKEN_PYPY_CTXMGR_EXIT = True
|
||||
except AssertionError:
|
||||
pass
|
||||
|
||||
# pylama:skip=1
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
""" Python-mode async support. """
|
||||
|
||||
from ._compat import Queue
|
||||
|
||||
|
||||
RESULTS = Queue()
|
||||
File diff suppressed because it is too large
Load diff
|
|
@ -1,249 +0,0 @@
|
|||
"""Define interfaces."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import json
|
||||
import os.path
|
||||
import time
|
||||
import vim # noqa
|
||||
|
||||
from ._compat import PY2
|
||||
|
||||
|
||||
class VimPymodeEnviroment(object):
|
||||
|
||||
"""Vim User interface."""
|
||||
|
||||
prefix = '[Pymode]'
|
||||
|
||||
def __init__(self):
|
||||
"""Init VIM environment."""
|
||||
self.current = vim.current
|
||||
self.options = dict(encoding=vim.eval('&enc'))
|
||||
self.options['debug'] = self.var('g:pymode_debug', True)
|
||||
|
||||
@property
|
||||
def curdir(self):
|
||||
"""Return current working directory."""
|
||||
return self.var('getcwd()')
|
||||
|
||||
@property
|
||||
def curbuf(self):
|
||||
"""Return current buffer."""
|
||||
return self.current.buffer
|
||||
|
||||
@property
|
||||
def cursor(self):
|
||||
"""Return current window position.
|
||||
|
||||
:return tuple: (row, col)
|
||||
|
||||
"""
|
||||
return self.current.window.cursor
|
||||
|
||||
@property
|
||||
def source(self):
|
||||
"""Return source of current buffer."""
|
||||
return "\n".join(self.lines)
|
||||
|
||||
@property
|
||||
def lines(self):
|
||||
"""Iterate by lines in current file.
|
||||
|
||||
:return list:
|
||||
|
||||
"""
|
||||
if not PY2:
|
||||
return self.curbuf
|
||||
|
||||
return [l.decode(self.options.get('encoding')) for l in self.curbuf]
|
||||
|
||||
@staticmethod
|
||||
def var(name, to_bool=False, silence=False):
|
||||
"""Get vim variable.
|
||||
|
||||
:return vimobj:
|
||||
|
||||
"""
|
||||
try:
|
||||
value = vim.eval(name)
|
||||
except vim.error:
|
||||
if silence:
|
||||
return None
|
||||
raise
|
||||
|
||||
if to_bool:
|
||||
try:
|
||||
value = bool(int(value))
|
||||
except ValueError:
|
||||
value = value
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def message(msg, history=False):
|
||||
"""Show message to user.
|
||||
|
||||
:return: :None
|
||||
|
||||
"""
|
||||
if history:
|
||||
return vim.command('echom "%s"' % str(msg))
|
||||
|
||||
return vim.command('call pymode#wide_message("%s")' % str(msg))
|
||||
|
||||
def user_input(self, msg, default=''):
|
||||
"""Return user input or default.
|
||||
|
||||
:return str:
|
||||
|
||||
"""
|
||||
msg = '%s %s ' % (self.prefix, msg)
|
||||
|
||||
if default != '':
|
||||
msg += '[%s] ' % default
|
||||
|
||||
try:
|
||||
vim.command('echohl Debug')
|
||||
input_str = vim.eval('input("%s> ")' % msg)
|
||||
vim.command('echohl none')
|
||||
except KeyboardInterrupt:
|
||||
input_str = ''
|
||||
|
||||
return input_str or default
|
||||
|
||||
def user_confirm(self, msg, yes=False):
|
||||
"""Get user confirmation.
|
||||
|
||||
:return bool:
|
||||
|
||||
"""
|
||||
default = 'yes' if yes else 'no'
|
||||
action = self.user_input(msg, default)
|
||||
return action and 'yes'.startswith(action)
|
||||
|
||||
def user_input_choices(self, msg, *options):
|
||||
"""Get one of many options.
|
||||
|
||||
:return str: A choosen option
|
||||
|
||||
"""
|
||||
choices = ['%s %s' % (self.prefix, msg)]
|
||||
choices += [
|
||||
"%s. %s" % (num, opt) for num, opt in enumerate(options, 1)]
|
||||
try:
|
||||
input_str = int(
|
||||
vim.eval('inputlist(%s)' % self.prepare_value(choices)))
|
||||
except (KeyboardInterrupt, ValueError):
|
||||
input_str = 0
|
||||
|
||||
if not input_str:
|
||||
self.message('Cancelled!')
|
||||
return False
|
||||
|
||||
try:
|
||||
return options[input_str - 1]
|
||||
except (IndexError, ValueError):
|
||||
self.error('Invalid option: %s' % input_str)
|
||||
return self.user_input_choices(msg, *options)
|
||||
|
||||
@staticmethod
|
||||
def error(msg):
|
||||
"""Show error to user."""
|
||||
vim.command('call pymode#error("%s")' % str(msg))
|
||||
|
||||
def debug(self, msg, *args):
|
||||
"""Print debug information."""
|
||||
if self.options.get('debug'):
|
||||
print("%s %s [%s]" % (
|
||||
int(time.time()), msg, ', '.join([str(a) for a in args])))
|
||||
|
||||
def stop(self, value=None):
|
||||
"""Break Vim function."""
|
||||
cmd = 'return'
|
||||
if value is not None:
|
||||
cmd += ' ' + self.prepare_value(value)
|
||||
vim.command(cmd)
|
||||
|
||||
def catch_exceptions(self, func):
|
||||
"""Decorator. Make execution more silence.
|
||||
|
||||
:return func:
|
||||
|
||||
"""
|
||||
def _wrapper(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except (Exception, vim.error) as e: # noqa
|
||||
if self.options.get('debug'):
|
||||
raise
|
||||
self.error(e)
|
||||
return None
|
||||
return _wrapper
|
||||
|
||||
def run(self, name, *args):
|
||||
"""Run vim function."""
|
||||
vim.command('call %s(%s)' % (name, ", ".join([
|
||||
self.prepare_value(a) for a in args
|
||||
])))
|
||||
|
||||
def let(self, name, value):
|
||||
"""Set variable."""
|
||||
cmd = 'let %s = %s' % (name, self.prepare_value(value))
|
||||
self.debug(cmd)
|
||||
vim.command(cmd)
|
||||
|
||||
def prepare_value(self, value, dumps=True):
|
||||
"""Decode bstr to vim encoding.
|
||||
|
||||
:return unicode string:
|
||||
|
||||
"""
|
||||
if dumps:
|
||||
value = json.dumps(value)
|
||||
|
||||
if PY2:
|
||||
value = value.decode('utf-8').encode(self.options.get('encoding'))
|
||||
|
||||
return value
|
||||
|
||||
def get_offset_params(self, cursor=None, base=""):
|
||||
"""Calculate current offset.
|
||||
|
||||
:return tuple: (source, offset)
|
||||
|
||||
"""
|
||||
row, col = cursor or env.cursor
|
||||
source = ""
|
||||
offset = 0
|
||||
for i, line in enumerate(self.lines, 1):
|
||||
if i == row:
|
||||
source += line[:col] + base
|
||||
offset = len(source)
|
||||
source += line[col:]
|
||||
else:
|
||||
source += line
|
||||
source += '\n'
|
||||
env.debug('Get offset', base or None, row, col, offset)
|
||||
return source, offset
|
||||
|
||||
@staticmethod
|
||||
def goto_line(line):
|
||||
"""Go to line."""
|
||||
vim.command('normal %sggzz' % line)
|
||||
|
||||
def goto_file(self, path, cmd='e', force=False):
|
||||
"""Open file by path."""
|
||||
if force or os.path.abspath(path) != self.curbuf.name:
|
||||
self.debug('read', path)
|
||||
if ' ' in path and os.name == 'posix':
|
||||
path = path.replace(' ', '\\ ')
|
||||
vim.command("%s %s" % (cmd, path))
|
||||
|
||||
@staticmethod
|
||||
def goto_buffer(bufnr):
|
||||
"""Open buffer."""
|
||||
if str(bufnr) != '-1':
|
||||
vim.command('buffer %s' % bufnr)
|
||||
|
||||
|
||||
env = VimPymodeEnviroment()
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -1,16 +0,0 @@
|
|||
try:
|
||||
import ast
|
||||
from _markerlib.markers import default_environment, compile, interpret
|
||||
except ImportError:
|
||||
if 'ast' in globals():
|
||||
raise
|
||||
def default_environment():
|
||||
return {}
|
||||
def compile(marker):
|
||||
def marker_fn(environment=None, override=None):
|
||||
# 'empty markers are True' heuristic won't install extra deps.
|
||||
return not marker.strip()
|
||||
marker_fn.__doc__ = marker
|
||||
return marker_fn
|
||||
def interpret(marker, environment=None, override=None):
|
||||
return compile(marker)()
|
||||
|
|
@ -1,119 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Interpret PEP 345 environment markers.
|
||||
|
||||
EXPR [in|==|!=|not in] EXPR [or|and] ...
|
||||
|
||||
where EXPR belongs to any of those:
|
||||
|
||||
python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1])
|
||||
python_full_version = sys.version.split()[0]
|
||||
os.name = os.name
|
||||
sys.platform = sys.platform
|
||||
platform.version = platform.version()
|
||||
platform.machine = platform.machine()
|
||||
platform.python_implementation = platform.python_implementation()
|
||||
a free string, like '2.6', or 'win32'
|
||||
"""
|
||||
|
||||
__all__ = ['default_environment', 'compile', 'interpret']
|
||||
|
||||
import ast
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import weakref
|
||||
|
||||
_builtin_compile = compile
|
||||
|
||||
try:
|
||||
from platform import python_implementation
|
||||
except ImportError:
|
||||
if os.name == "java":
|
||||
# Jython 2.5 has ast module, but not platform.python_implementation() function.
|
||||
def python_implementation():
|
||||
return "Jython"
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
# restricted set of variables
|
||||
_VARS = {'sys.platform': sys.platform,
|
||||
'python_version': '%s.%s' % sys.version_info[:2],
|
||||
# FIXME parsing sys.platform is not reliable, but there is no other
|
||||
# way to get e.g. 2.7.2+, and the PEP is defined with sys.version
|
||||
'python_full_version': sys.version.split(' ', 1)[0],
|
||||
'os.name': os.name,
|
||||
'platform.version': platform.version(),
|
||||
'platform.machine': platform.machine(),
|
||||
'platform.python_implementation': python_implementation(),
|
||||
'extra': None # wheel extension
|
||||
}
|
||||
|
||||
for var in list(_VARS.keys()):
|
||||
if '.' in var:
|
||||
_VARS[var.replace('.', '_')] = _VARS[var]
|
||||
|
||||
def default_environment():
|
||||
"""Return copy of default PEP 385 globals dictionary."""
|
||||
return dict(_VARS)
|
||||
|
||||
class ASTWhitelist(ast.NodeTransformer):
|
||||
def __init__(self, statement):
|
||||
self.statement = statement # for error messages
|
||||
|
||||
ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
|
||||
# Bool operations
|
||||
ALLOWED += (ast.And, ast.Or)
|
||||
# Comparison operations
|
||||
ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)
|
||||
|
||||
def visit(self, node):
|
||||
"""Ensure statement only contains allowed nodes."""
|
||||
if not isinstance(node, self.ALLOWED):
|
||||
raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
|
||||
(self.statement,
|
||||
(' ' * node.col_offset) + '^'))
|
||||
return ast.NodeTransformer.visit(self, node)
|
||||
|
||||
def visit_Attribute(self, node):
|
||||
"""Flatten one level of attribute access."""
|
||||
new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
|
||||
return ast.copy_location(new_node, node)
|
||||
|
||||
def parse_marker(marker):
|
||||
tree = ast.parse(marker, mode='eval')
|
||||
new_tree = ASTWhitelist(marker).generic_visit(tree)
|
||||
return new_tree
|
||||
|
||||
def compile_marker(parsed_marker):
|
||||
return _builtin_compile(parsed_marker, '<environment marker>', 'eval',
|
||||
dont_inherit=True)
|
||||
|
||||
_cache = weakref.WeakValueDictionary()
|
||||
|
||||
def compile(marker):
|
||||
"""Return compiled marker as a function accepting an environment dict."""
|
||||
try:
|
||||
return _cache[marker]
|
||||
except KeyError:
|
||||
pass
|
||||
if not marker.strip():
|
||||
def marker_fn(environment=None, override=None):
|
||||
""""""
|
||||
return True
|
||||
else:
|
||||
compiled_marker = compile_marker(parse_marker(marker))
|
||||
def marker_fn(environment=None, override=None):
|
||||
"""override updates environment"""
|
||||
if override is None:
|
||||
override = {}
|
||||
if environment is None:
|
||||
environment = default_environment()
|
||||
environment.update(override)
|
||||
return eval(compiled_marker, environment)
|
||||
marker_fn.__doc__ = marker
|
||||
_cache[marker] = marker_fn
|
||||
return _cache[marker]
|
||||
|
||||
def interpret(marker, environment=None):
|
||||
return compile(marker)(environment)
|
||||
|
|
@ -1,131 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Python Abstract Syntax Tree New Generation
|
||||
|
||||
The aim of this module is to provide a common base representation of
|
||||
python source code for projects such as pychecker, pyreverse,
|
||||
pylint... Well, actually the development of this library is essentially
|
||||
governed by pylint's needs.
|
||||
|
||||
It extends class defined in the python's _ast module with some
|
||||
additional methods and attributes. Instance attributes are added by a
|
||||
builder object, which can either generate extended ast (let's call
|
||||
them astroid ;) by visiting an existent ast tree or by inspecting living
|
||||
object. Methods are added by monkey patching ast classes.
|
||||
|
||||
Main modules are:
|
||||
|
||||
* nodes and scoped_nodes for more information about methods and
|
||||
attributes added to different node classes
|
||||
|
||||
* the manager contains a high level object to get astroid trees from
|
||||
source files and living objects. It maintains a cache of previously
|
||||
constructed tree for quick access
|
||||
|
||||
* builder contains the class responsible to build astroid trees
|
||||
"""
|
||||
__doctype__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
import re
|
||||
from operator import attrgetter
|
||||
|
||||
# WARNING: internal imports order matters !
|
||||
|
||||
# make all exception classes accessible from astroid package
|
||||
from astroid.exceptions import *
|
||||
|
||||
# make all node classes accessible from astroid package
|
||||
from astroid.nodes import *
|
||||
|
||||
# trigger extra monkey-patching
|
||||
from astroid import inference
|
||||
|
||||
# more stuff available
|
||||
from astroid import raw_building
|
||||
from astroid.bases import YES, Instance, BoundMethod, UnboundMethod
|
||||
from astroid.node_classes import are_exclusive, unpack_infer
|
||||
from astroid.scoped_nodes import builtin_lookup
|
||||
|
||||
# make a manager instance (borg) as well as Project and Package classes
|
||||
# accessible from astroid package
|
||||
from astroid.manager import AstroidManager, Project
|
||||
MANAGER = AstroidManager()
|
||||
del AstroidManager
|
||||
|
||||
# transform utilities (filters and decorator)
|
||||
|
||||
class AsStringRegexpPredicate(object):
|
||||
"""Class to be used as predicate that may be given to `register_transform`
|
||||
|
||||
First argument is a regular expression that will be searched against the `as_string`
|
||||
representation of the node onto which it's applied.
|
||||
|
||||
If specified, the second argument is an `attrgetter` expression that will be
|
||||
applied on the node first to get the actual node on which `as_string` should
|
||||
be called.
|
||||
|
||||
WARNING: This can be fairly slow, as it has to convert every AST node back
|
||||
to Python code; you should consider examining the AST directly instead.
|
||||
"""
|
||||
def __init__(self, regexp, expression=None):
|
||||
self.regexp = re.compile(regexp)
|
||||
self.expression = expression
|
||||
|
||||
def __call__(self, node):
|
||||
if self.expression is not None:
|
||||
node = attrgetter(self.expression)(node)
|
||||
return self.regexp.search(node.as_string())
|
||||
|
||||
def inference_tip(infer_function):
|
||||
"""Given an instance specific inference function, return a function to be
|
||||
given to MANAGER.register_transform to set this inference function.
|
||||
|
||||
Typical usage
|
||||
|
||||
.. sourcecode:: python
|
||||
|
||||
MANAGER.register_transform(CallFunc, inference_tip(infer_named_tuple),
|
||||
predicate)
|
||||
"""
|
||||
def transform(node, infer_function=infer_function):
|
||||
node._explicit_inference = infer_function
|
||||
return node
|
||||
return transform
|
||||
|
||||
|
||||
def register_module_extender(manager, module_name, get_extension_mod):
|
||||
def transform(node):
|
||||
extension_module = get_extension_mod()
|
||||
for name, obj in extension_module.locals.items():
|
||||
node.locals[name] = obj
|
||||
|
||||
manager.register_transform(Module, transform, lambda n: n.name == module_name)
|
||||
|
||||
|
||||
# load brain plugins
|
||||
from os import listdir
|
||||
from os.path import join, dirname
|
||||
BRAIN_MODULES_DIR = join(dirname(__file__), 'brain')
|
||||
if BRAIN_MODULES_DIR not in sys.path:
|
||||
# add it to the end of the list so user path take precedence
|
||||
sys.path.append(BRAIN_MODULES_DIR)
|
||||
# load modules in this directory
|
||||
for module in listdir(BRAIN_MODULES_DIR):
|
||||
if module.endswith('.py'):
|
||||
__import__(module[:-3])
|
||||
|
|
@ -1,42 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""astroid packaging information"""
|
||||
distname = 'astroid'
|
||||
|
||||
modname = 'astroid'
|
||||
|
||||
numversion = (1, 3, 8)
|
||||
version = '.'.join([str(num) for num in numversion])
|
||||
|
||||
install_requires = ['logilab-common>=0.63.0', 'six']
|
||||
|
||||
license = 'LGPL'
|
||||
|
||||
author = 'Logilab'
|
||||
author_email = 'pylint-dev@lists.logilab.org'
|
||||
mailinglist = "mailto://%s" % author_email
|
||||
web = 'http://bitbucket.org/logilab/astroid'
|
||||
|
||||
description = "A abstract syntax tree for Python with inference support."
|
||||
|
||||
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Topic :: Software Development :: Quality Assurance",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 3",
|
||||
]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -1,499 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""This module renders Astroid nodes as string:
|
||||
|
||||
* :func:`to_code` function return equivalent (hopefuly valid) python string
|
||||
|
||||
* :func:`dump` function return an internal representation of nodes found
|
||||
in the tree, useful for debugging or understanding the tree structure
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
INDENT = ' ' # 4 spaces ; keep indentation variable
|
||||
|
||||
|
||||
def dump(node, ids=False):
|
||||
"""print a nice astroid tree representation.
|
||||
|
||||
:param ids: if true, we also print the ids (usefull for debugging)
|
||||
"""
|
||||
result = []
|
||||
_repr_tree(node, result, ids=ids)
|
||||
return "\n".join(result)
|
||||
|
||||
def _repr_tree(node, result, indent='', _done=None, ids=False):
|
||||
"""built a tree representation of a node as a list of lines"""
|
||||
if _done is None:
|
||||
_done = set()
|
||||
if not hasattr(node, '_astroid_fields'): # not a astroid node
|
||||
return
|
||||
if node in _done:
|
||||
result.append(indent + 'loop in tree: %s' % node)
|
||||
return
|
||||
_done.add(node)
|
||||
node_str = str(node)
|
||||
if ids:
|
||||
node_str += ' . \t%x' % id(node)
|
||||
result.append(indent + node_str)
|
||||
indent += INDENT
|
||||
for field in node._astroid_fields:
|
||||
value = getattr(node, field)
|
||||
if isinstance(value, (list, tuple)):
|
||||
result.append(indent + field + " = [")
|
||||
for child in value:
|
||||
if isinstance(child, (list, tuple)):
|
||||
# special case for Dict # FIXME
|
||||
_repr_tree(child[0], result, indent, _done, ids)
|
||||
_repr_tree(child[1], result, indent, _done, ids)
|
||||
result.append(indent + ',')
|
||||
else:
|
||||
_repr_tree(child, result, indent, _done, ids)
|
||||
result.append(indent + "]")
|
||||
else:
|
||||
result.append(indent + field + " = ")
|
||||
_repr_tree(value, result, indent, _done, ids)
|
||||
|
||||
|
||||
class AsStringVisitor(object):
|
||||
"""Visitor to render an Astroid node as a valid python code string"""
|
||||
|
||||
def __call__(self, node):
|
||||
"""Makes this visitor behave as a simple function"""
|
||||
return node.accept(self)
|
||||
|
||||
def _stmt_list(self, stmts):
|
||||
"""return a list of nodes to string"""
|
||||
stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr])
|
||||
return INDENT + stmts.replace('\n', '\n'+INDENT)
|
||||
|
||||
|
||||
## visit_<node> methods ###########################################
|
||||
|
||||
def visit_arguments(self, node):
|
||||
"""return an astroid.Function node as string"""
|
||||
return node.format_args()
|
||||
|
||||
def visit_assattr(self, node):
|
||||
"""return an astroid.AssAttr node as string"""
|
||||
return self.visit_getattr(node)
|
||||
|
||||
def visit_assert(self, node):
|
||||
"""return an astroid.Assert node as string"""
|
||||
if node.fail:
|
||||
return 'assert %s, %s' % (node.test.accept(self),
|
||||
node.fail.accept(self))
|
||||
return 'assert %s' % node.test.accept(self)
|
||||
|
||||
def visit_assname(self, node):
|
||||
"""return an astroid.AssName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_assign(self, node):
|
||||
"""return an astroid.Assign node as string"""
|
||||
lhs = ' = '.join([n.accept(self) for n in node.targets])
|
||||
return '%s = %s' % (lhs, node.value.accept(self))
|
||||
|
||||
def visit_augassign(self, node):
|
||||
"""return an astroid.AugAssign node as string"""
|
||||
return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self))
|
||||
|
||||
def visit_backquote(self, node):
|
||||
"""return an astroid.Backquote node as string"""
|
||||
return '`%s`' % node.value.accept(self)
|
||||
|
||||
def visit_binop(self, node):
|
||||
"""return an astroid.BinOp node as string"""
|
||||
return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self))
|
||||
|
||||
def visit_boolop(self, node):
|
||||
"""return an astroid.BoolOp node as string"""
|
||||
return (' %s ' % node.op).join(['(%s)' % n.accept(self)
|
||||
for n in node.values])
|
||||
|
||||
def visit_break(self, node):
|
||||
"""return an astroid.Break node as string"""
|
||||
return 'break'
|
||||
|
||||
def visit_callfunc(self, node):
|
||||
"""return an astroid.CallFunc node as string"""
|
||||
expr_str = node.func.accept(self)
|
||||
args = [arg.accept(self) for arg in node.args]
|
||||
if node.starargs:
|
||||
args.append('*' + node.starargs.accept(self))
|
||||
if node.kwargs:
|
||||
args.append('**' + node.kwargs.accept(self))
|
||||
return '%s(%s)' % (expr_str, ', '.join(args))
|
||||
|
||||
def visit_class(self, node):
|
||||
"""return an astroid.Class node as string"""
|
||||
decorate = node.decorators and node.decorators.accept(self) or ''
|
||||
bases = ', '.join([n.accept(self) for n in node.bases])
|
||||
if sys.version_info[0] == 2:
|
||||
bases = bases and '(%s)' % bases or ''
|
||||
else:
|
||||
metaclass = node.metaclass()
|
||||
if metaclass and not node.has_metaclass_hack():
|
||||
if bases:
|
||||
bases = '(%s, metaclass=%s)' % (bases, metaclass.name)
|
||||
else:
|
||||
bases = '(metaclass=%s)' % metaclass.name
|
||||
else:
|
||||
bases = bases and '(%s)' % bases or ''
|
||||
docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
|
||||
return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs,
|
||||
self._stmt_list(node.body))
|
||||
|
||||
def visit_compare(self, node):
|
||||
"""return an astroid.Compare node as string"""
|
||||
rhs_str = ' '.join(['%s %s' % (op, expr.accept(self))
|
||||
for op, expr in node.ops])
|
||||
return '%s %s' % (node.left.accept(self), rhs_str)
|
||||
|
||||
def visit_comprehension(self, node):
|
||||
"""return an astroid.Comprehension node as string"""
|
||||
ifs = ''.join([' if %s' % n.accept(self) for n in node.ifs])
|
||||
return 'for %s in %s%s' % (node.target.accept(self),
|
||||
node.iter.accept(self), ifs)
|
||||
|
||||
def visit_const(self, node):
|
||||
"""return an astroid.Const node as string"""
|
||||
return repr(node.value)
|
||||
|
||||
def visit_continue(self, node):
|
||||
"""return an astroid.Continue node as string"""
|
||||
return 'continue'
|
||||
|
||||
def visit_delete(self, node): # XXX check if correct
|
||||
"""return an astroid.Delete node as string"""
|
||||
return 'del %s' % ', '.join([child.accept(self)
|
||||
for child in node.targets])
|
||||
|
||||
def visit_delattr(self, node):
|
||||
"""return an astroid.DelAttr node as string"""
|
||||
return self.visit_getattr(node)
|
||||
|
||||
def visit_delname(self, node):
|
||||
"""return an astroid.DelName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_decorators(self, node):
|
||||
"""return an astroid.Decorators node as string"""
|
||||
return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes])
|
||||
|
||||
def visit_dict(self, node):
|
||||
"""return an astroid.Dict node as string"""
|
||||
return '{%s}' % ', '.join(['%s: %s' % (key.accept(self),
|
||||
value.accept(self))
|
||||
for key, value in node.items])
|
||||
|
||||
def visit_dictcomp(self, node):
|
||||
"""return an astroid.DictComp node as string"""
|
||||
return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_discard(self, node):
|
||||
"""return an astroid.Discard node as string"""
|
||||
return node.value.accept(self)
|
||||
|
||||
def visit_emptynode(self, node):
|
||||
"""dummy method for visiting an Empty node"""
|
||||
return ''
|
||||
|
||||
def visit_excepthandler(self, node):
|
||||
if node.type:
|
||||
if node.name:
|
||||
excs = 'except %s, %s' % (node.type.accept(self),
|
||||
node.name.accept(self))
|
||||
else:
|
||||
excs = 'except %s' % node.type.accept(self)
|
||||
else:
|
||||
excs = 'except'
|
||||
return '%s:\n%s' % (excs, self._stmt_list(node.body))
|
||||
|
||||
def visit_ellipsis(self, node):
|
||||
"""return an astroid.Ellipsis node as string"""
|
||||
return '...'
|
||||
|
||||
def visit_empty(self, node):
|
||||
"""return an Empty node as string"""
|
||||
return ''
|
||||
|
||||
def visit_exec(self, node):
|
||||
"""return an astroid.Exec node as string"""
|
||||
if node.locals:
|
||||
return 'exec %s in %s, %s' % (node.expr.accept(self),
|
||||
node.locals.accept(self),
|
||||
node.globals.accept(self))
|
||||
if node.globals:
|
||||
return 'exec %s in %s' % (node.expr.accept(self),
|
||||
node.globals.accept(self))
|
||||
return 'exec %s' % node.expr.accept(self)
|
||||
|
||||
def visit_extslice(self, node):
|
||||
"""return an astroid.ExtSlice node as string"""
|
||||
return ','.join([dim.accept(self) for dim in node.dims])
|
||||
|
||||
def visit_for(self, node):
|
||||
"""return an astroid.For node as string"""
|
||||
fors = 'for %s in %s:\n%s' % (node.target.accept(self),
|
||||
node.iter.accept(self),
|
||||
self._stmt_list(node.body))
|
||||
if node.orelse:
|
||||
fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse))
|
||||
return fors
|
||||
|
||||
def visit_from(self, node):
|
||||
"""return an astroid.From node as string"""
|
||||
return 'from %s import %s' % ('.' * (node.level or 0) + node.modname,
|
||||
_import_string(node.names))
|
||||
|
||||
def visit_function(self, node):
|
||||
"""return an astroid.Function node as string"""
|
||||
decorate = node.decorators and node.decorators.accept(self) or ''
|
||||
docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
|
||||
return '\n%sdef %s(%s):%s\n%s' % (decorate, node.name, node.args.accept(self),
|
||||
docs, self._stmt_list(node.body))
|
||||
|
||||
def visit_genexpr(self, node):
|
||||
"""return an astroid.GenExpr node as string"""
|
||||
return '(%s %s)' % (node.elt.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_getattr(self, node):
|
||||
"""return an astroid.Getattr node as string"""
|
||||
return '%s.%s' % (node.expr.accept(self), node.attrname)
|
||||
|
||||
def visit_global(self, node):
|
||||
"""return an astroid.Global node as string"""
|
||||
return 'global %s' % ', '.join(node.names)
|
||||
|
||||
def visit_if(self, node):
|
||||
"""return an astroid.If node as string"""
|
||||
ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))]
|
||||
if node.orelse:# XXX use elif ???
|
||||
ifs.append('else:\n%s' % self._stmt_list(node.orelse))
|
||||
return '\n'.join(ifs)
|
||||
|
||||
def visit_ifexp(self, node):
|
||||
"""return an astroid.IfExp node as string"""
|
||||
return '%s if %s else %s' % (node.body.accept(self),
|
||||
node.test.accept(self),
|
||||
node.orelse.accept(self))
|
||||
|
||||
def visit_import(self, node):
|
||||
"""return an astroid.Import node as string"""
|
||||
return 'import %s' % _import_string(node.names)
|
||||
|
||||
def visit_keyword(self, node):
|
||||
"""return an astroid.Keyword node as string"""
|
||||
return '%s=%s' % (node.arg, node.value.accept(self))
|
||||
|
||||
def visit_lambda(self, node):
|
||||
"""return an astroid.Lambda node as string"""
|
||||
return 'lambda %s: %s' % (node.args.accept(self),
|
||||
node.body.accept(self))
|
||||
|
||||
def visit_list(self, node):
|
||||
"""return an astroid.List node as string"""
|
||||
return '[%s]' % ', '.join([child.accept(self) for child in node.elts])
|
||||
|
||||
def visit_listcomp(self, node):
|
||||
"""return an astroid.ListComp node as string"""
|
||||
return '[%s %s]' % (node.elt.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_module(self, node):
|
||||
"""return an astroid.Module node as string"""
|
||||
docs = node.doc and '"""%s"""\n\n' % node.doc or ''
|
||||
return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n'
|
||||
|
||||
def visit_name(self, node):
|
||||
"""return an astroid.Name node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_pass(self, node):
|
||||
"""return an astroid.Pass node as string"""
|
||||
return 'pass'
|
||||
|
||||
def visit_print(self, node):
|
||||
"""return an astroid.Print node as string"""
|
||||
nodes = ', '.join([n.accept(self) for n in node.values])
|
||||
if not node.nl:
|
||||
nodes = '%s,' % nodes
|
||||
if node.dest:
|
||||
return 'print >> %s, %s' % (node.dest.accept(self), nodes)
|
||||
return 'print %s' % nodes
|
||||
|
||||
def visit_raise(self, node):
|
||||
"""return an astroid.Raise node as string"""
|
||||
if node.exc:
|
||||
if node.inst:
|
||||
if node.tback:
|
||||
return 'raise %s, %s, %s' % (node.exc.accept(self),
|
||||
node.inst.accept(self),
|
||||
node.tback.accept(self))
|
||||
return 'raise %s, %s' % (node.exc.accept(self),
|
||||
node.inst.accept(self))
|
||||
return 'raise %s' % node.exc.accept(self)
|
||||
return 'raise'
|
||||
|
||||
def visit_return(self, node):
|
||||
"""return an astroid.Return node as string"""
|
||||
if node.value:
|
||||
return 'return %s' % node.value.accept(self)
|
||||
else:
|
||||
return 'return'
|
||||
|
||||
def visit_index(self, node):
|
||||
"""return a astroid.Index node as string"""
|
||||
return node.value.accept(self)
|
||||
|
||||
def visit_set(self, node):
|
||||
"""return an astroid.Set node as string"""
|
||||
return '{%s}' % ', '.join([child.accept(self) for child in node.elts])
|
||||
|
||||
def visit_setcomp(self, node):
|
||||
"""return an astroid.SetComp node as string"""
|
||||
return '{%s %s}' % (node.elt.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_slice(self, node):
|
||||
"""return a astroid.Slice node as string"""
|
||||
lower = node.lower and node.lower.accept(self) or ''
|
||||
upper = node.upper and node.upper.accept(self) or ''
|
||||
step = node.step and node.step.accept(self) or ''
|
||||
if step:
|
||||
return '%s:%s:%s' % (lower, upper, step)
|
||||
return '%s:%s' % (lower, upper)
|
||||
|
||||
def visit_subscript(self, node):
|
||||
"""return an astroid.Subscript node as string"""
|
||||
return '%s[%s]' % (node.value.accept(self), node.slice.accept(self))
|
||||
|
||||
def visit_tryexcept(self, node):
|
||||
"""return an astroid.TryExcept node as string"""
|
||||
trys = ['try:\n%s' % self._stmt_list(node.body)]
|
||||
for handler in node.handlers:
|
||||
trys.append(handler.accept(self))
|
||||
if node.orelse:
|
||||
trys.append('else:\n%s' % self._stmt_list(node.orelse))
|
||||
return '\n'.join(trys)
|
||||
|
||||
def visit_tryfinally(self, node):
|
||||
"""return an astroid.TryFinally node as string"""
|
||||
return 'try:\n%s\nfinally:\n%s' % (self._stmt_list(node.body),
|
||||
self._stmt_list(node.finalbody))
|
||||
|
||||
def visit_tuple(self, node):
|
||||
"""return an astroid.Tuple node as string"""
|
||||
if len(node.elts) == 1:
|
||||
return '(%s, )' % node.elts[0].accept(self)
|
||||
return '(%s)' % ', '.join([child.accept(self) for child in node.elts])
|
||||
|
||||
def visit_unaryop(self, node):
|
||||
"""return an astroid.UnaryOp node as string"""
|
||||
if node.op == 'not':
|
||||
operator = 'not '
|
||||
else:
|
||||
operator = node.op
|
||||
return '%s%s' % (operator, node.operand.accept(self))
|
||||
|
||||
def visit_while(self, node):
|
||||
"""return an astroid.While node as string"""
|
||||
whiles = 'while %s:\n%s' % (node.test.accept(self),
|
||||
self._stmt_list(node.body))
|
||||
if node.orelse:
|
||||
whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse))
|
||||
return whiles
|
||||
|
||||
def visit_with(self, node): # 'with' without 'as' is possible
|
||||
"""return an astroid.With node as string"""
|
||||
items = ', '.join(('(%s)' % expr.accept(self)) +
|
||||
(vars and ' as (%s)' % (vars.accept(self)) or '')
|
||||
for expr, vars in node.items)
|
||||
return 'with %s:\n%s' % (items, self._stmt_list(node.body))
|
||||
|
||||
def visit_yield(self, node):
|
||||
"""yield an ast.Yield node as string"""
|
||||
yi_val = node.value and (" " + node.value.accept(self)) or ""
|
||||
expr = 'yield' + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
else:
|
||||
return "(%s)" % (expr,)
|
||||
|
||||
|
||||
class AsStringVisitor3k(AsStringVisitor):
|
||||
"""AsStringVisitor3k overwrites some AsStringVisitor methods"""
|
||||
|
||||
def visit_excepthandler(self, node):
|
||||
if node.type:
|
||||
if node.name:
|
||||
excs = 'except %s as %s' % (node.type.accept(self),
|
||||
node.name.accept(self))
|
||||
else:
|
||||
excs = 'except %s' % node.type.accept(self)
|
||||
else:
|
||||
excs = 'except'
|
||||
return '%s:\n%s' % (excs, self._stmt_list(node.body))
|
||||
|
||||
def visit_nonlocal(self, node):
|
||||
"""return an astroid.Nonlocal node as string"""
|
||||
return 'nonlocal %s' % ', '.join(node.names)
|
||||
|
||||
def visit_raise(self, node):
|
||||
"""return an astroid.Raise node as string"""
|
||||
if node.exc:
|
||||
if node.cause:
|
||||
return 'raise %s from %s' % (node.exc.accept(self),
|
||||
node.cause.accept(self))
|
||||
return 'raise %s' % node.exc.accept(self)
|
||||
return 'raise'
|
||||
|
||||
def visit_starred(self, node):
|
||||
"""return Starred node as string"""
|
||||
return "*" + node.value.accept(self)
|
||||
|
||||
def visit_yieldfrom(self, node):
|
||||
""" Return an astroid.YieldFrom node as string. """
|
||||
yi_val = node.value and (" " + node.value.accept(self)) or ""
|
||||
expr = 'yield from' + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
else:
|
||||
return "(%s)" % (expr,)
|
||||
|
||||
|
||||
def _import_string(names):
|
||||
"""return a list of (name, asname) formatted as a string"""
|
||||
_names = []
|
||||
for name, asname in names:
|
||||
if asname is not None:
|
||||
_names.append('%s as %s' % (name, asname))
|
||||
else:
|
||||
_names.append(name)
|
||||
return ', '.join(_names)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
AsStringVisitor = AsStringVisitor3k
|
||||
|
||||
# this visitor is stateless, thus it can be reused
|
||||
to_code = AsStringVisitor()
|
||||
|
||||
|
|
@ -1,86 +0,0 @@
|
|||
# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Small AST optimizations."""
|
||||
|
||||
import _ast
|
||||
|
||||
from astroid import nodes
|
||||
|
||||
|
||||
__all__ = ('ASTPeepholeOptimizer', )
|
||||
|
||||
|
||||
try:
|
||||
_TYPES = (_ast.Str, _ast.Bytes)
|
||||
except AttributeError:
|
||||
_TYPES = (_ast.Str, )
|
||||
|
||||
|
||||
class ASTPeepholeOptimizer(object):
|
||||
"""Class for applying small optimizations to generate new AST."""
|
||||
|
||||
def optimize_binop(self, node):
|
||||
"""Optimize BinOps with string Const nodes on the lhs.
|
||||
|
||||
This fixes an infinite recursion crash, where multiple
|
||||
strings are joined using the addition operator. With a
|
||||
sufficient number of such strings, astroid will fail
|
||||
with a maximum recursion limit exceeded. The
|
||||
function will return a Const node with all the strings
|
||||
already joined.
|
||||
Return ``None`` if no AST node can be obtained
|
||||
through optimization.
|
||||
"""
|
||||
ast_nodes = []
|
||||
current = node
|
||||
while isinstance(current, _ast.BinOp):
|
||||
# lhs must be a BinOp with the addition operand.
|
||||
if not isinstance(current.left, _ast.BinOp):
|
||||
return
|
||||
if (not isinstance(current.left.op, _ast.Add)
|
||||
or not isinstance(current.op, _ast.Add)):
|
||||
return
|
||||
|
||||
# rhs must a str / bytes.
|
||||
if not isinstance(current.right, _TYPES):
|
||||
return
|
||||
|
||||
ast_nodes.append(current.right.s)
|
||||
current = current.left
|
||||
|
||||
if (isinstance(current, _ast.BinOp)
|
||||
and isinstance(current.left, _TYPES)
|
||||
and isinstance(current.right, _TYPES)):
|
||||
# Stop early if we are at the last BinOp in
|
||||
# the operation
|
||||
ast_nodes.append(current.right.s)
|
||||
ast_nodes.append(current.left.s)
|
||||
break
|
||||
|
||||
if not ast_nodes:
|
||||
return
|
||||
|
||||
# If we have inconsistent types, bail out.
|
||||
known = type(ast_nodes[0])
|
||||
if any(type(element) is not known
|
||||
for element in ast_nodes[1:]):
|
||||
return
|
||||
|
||||
value = known().join(reversed(ast_nodes))
|
||||
newnode = nodes.Const(value)
|
||||
return newnode
|
||||
|
|
@ -1,652 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""This module contains base classes and functions for the nodes and some
|
||||
inference utils.
|
||||
"""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
|
||||
from logilab.common.decorators import cachedproperty
|
||||
|
||||
from astroid.exceptions import (InferenceError, AstroidError, NotFoundError,
|
||||
UnresolvableName, UseInferenceDefault)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
BUILTINS = 'builtins'
|
||||
else:
|
||||
BUILTINS = '__builtin__'
|
||||
|
||||
|
||||
class Proxy(object):
|
||||
"""a simple proxy object"""
|
||||
|
||||
_proxied = None # proxied object may be set by class or by instance
|
||||
|
||||
def __init__(self, proxied=None):
|
||||
if proxied is not None:
|
||||
self._proxied = proxied
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name == '_proxied':
|
||||
return getattr(self.__class__, '_proxied')
|
||||
if name in self.__dict__:
|
||||
return self.__dict__[name]
|
||||
return getattr(self._proxied, name)
|
||||
|
||||
def infer(self, context=None):
|
||||
yield self
|
||||
|
||||
|
||||
# Inference ##################################################################
|
||||
|
||||
class InferenceContext(object):
|
||||
__slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'infered')
|
||||
|
||||
def __init__(self, path=None, infered=None):
|
||||
self.path = path or set()
|
||||
self.lookupname = None
|
||||
self.callcontext = None
|
||||
self.boundnode = None
|
||||
self.infered = infered or {}
|
||||
|
||||
def push(self, node):
|
||||
name = self.lookupname
|
||||
if (node, name) in self.path:
|
||||
raise StopIteration()
|
||||
self.path.add((node, name))
|
||||
|
||||
def clone(self):
|
||||
# XXX copy lookupname/callcontext ?
|
||||
clone = InferenceContext(self.path, infered=self.infered)
|
||||
clone.callcontext = self.callcontext
|
||||
clone.boundnode = self.boundnode
|
||||
return clone
|
||||
|
||||
def cache_generator(self, key, generator):
|
||||
results = []
|
||||
for result in generator:
|
||||
results.append(result)
|
||||
yield result
|
||||
|
||||
self.infered[key] = tuple(results)
|
||||
return
|
||||
|
||||
@contextmanager
|
||||
def restore_path(self):
|
||||
path = set(self.path)
|
||||
yield
|
||||
self.path = path
|
||||
|
||||
def copy_context(context):
|
||||
if context is not None:
|
||||
return context.clone()
|
||||
else:
|
||||
return InferenceContext()
|
||||
|
||||
|
||||
def _infer_stmts(stmts, context, frame=None):
|
||||
"""return an iterator on statements inferred by each statement in <stmts>
|
||||
"""
|
||||
stmt = None
|
||||
infered = False
|
||||
if context is not None:
|
||||
name = context.lookupname
|
||||
context = context.clone()
|
||||
else:
|
||||
name = None
|
||||
context = InferenceContext()
|
||||
for stmt in stmts:
|
||||
if stmt is YES:
|
||||
yield stmt
|
||||
infered = True
|
||||
continue
|
||||
context.lookupname = stmt._infer_name(frame, name)
|
||||
try:
|
||||
for infered in stmt.infer(context):
|
||||
yield infered
|
||||
infered = True
|
||||
except UnresolvableName:
|
||||
continue
|
||||
except InferenceError:
|
||||
yield YES
|
||||
infered = True
|
||||
if not infered:
|
||||
raise InferenceError(str(stmt))
|
||||
|
||||
|
||||
# special inference objects (e.g. may be returned as nodes by .infer()) #######
|
||||
|
||||
class _Yes(object):
|
||||
"""a yes object"""
|
||||
def __repr__(self):
|
||||
return 'YES'
|
||||
def __getattribute__(self, name):
|
||||
if name == 'next':
|
||||
raise AttributeError('next method should not be called')
|
||||
if name.startswith('__') and name.endswith('__'):
|
||||
# to avoid inspection pb
|
||||
return super(_Yes, self).__getattribute__(name)
|
||||
return self
|
||||
def __call__(self, *args, **kwargs):
|
||||
return self
|
||||
|
||||
|
||||
YES = _Yes()
|
||||
|
||||
|
||||
class Instance(Proxy):
|
||||
"""a special node representing a class instance"""
|
||||
def getattr(self, name, context=None, lookupclass=True):
|
||||
try:
|
||||
values = self._proxied.instance_attr(name, context)
|
||||
except NotFoundError:
|
||||
if name == '__class__':
|
||||
return [self._proxied]
|
||||
if lookupclass:
|
||||
# class attributes not available through the instance
|
||||
# unless they are explicitly defined
|
||||
if name in ('__name__', '__bases__', '__mro__', '__subclasses__'):
|
||||
return self._proxied.local_attr(name)
|
||||
return self._proxied.getattr(name, context)
|
||||
raise NotFoundError(name)
|
||||
# since we've no context information, return matching class members as
|
||||
# well
|
||||
if lookupclass:
|
||||
try:
|
||||
return values + self._proxied.getattr(name, context)
|
||||
except NotFoundError:
|
||||
pass
|
||||
return values
|
||||
|
||||
def igetattr(self, name, context=None):
|
||||
"""inferred getattr"""
|
||||
if not context:
|
||||
context = InferenceContext()
|
||||
try:
|
||||
# avoid recursively inferring the same attr on the same class
|
||||
|
||||
context.push((self._proxied, name))
|
||||
# XXX frame should be self._proxied, or not ?
|
||||
get_attr = self.getattr(name, context, lookupclass=False)
|
||||
return _infer_stmts(
|
||||
self._wrap_attr(get_attr, context),
|
||||
context,
|
||||
frame=self,
|
||||
)
|
||||
except NotFoundError:
|
||||
try:
|
||||
# fallback to class'igetattr since it has some logic to handle
|
||||
# descriptors
|
||||
return self._wrap_attr(self._proxied.igetattr(name, context),
|
||||
context)
|
||||
except NotFoundError:
|
||||
raise InferenceError(name)
|
||||
|
||||
def _wrap_attr(self, attrs, context=None):
|
||||
"""wrap bound methods of attrs in a InstanceMethod proxies"""
|
||||
for attr in attrs:
|
||||
if isinstance(attr, UnboundMethod):
|
||||
if BUILTINS + '.property' in attr.decoratornames():
|
||||
for infered in attr.infer_call_result(self, context):
|
||||
yield infered
|
||||
else:
|
||||
yield BoundMethod(attr, self)
|
||||
else:
|
||||
yield attr
|
||||
|
||||
def infer_call_result(self, caller, context=None):
|
||||
"""infer what a class instance is returning when called"""
|
||||
infered = False
|
||||
for node in self._proxied.igetattr('__call__', context):
|
||||
if node is YES:
|
||||
continue
|
||||
for res in node.infer_call_result(caller, context):
|
||||
infered = True
|
||||
yield res
|
||||
if not infered:
|
||||
raise InferenceError()
|
||||
|
||||
def __repr__(self):
|
||||
return '<Instance of %s.%s at 0x%s>' % (self._proxied.root().name,
|
||||
self._proxied.name,
|
||||
id(self))
|
||||
def __str__(self):
|
||||
return 'Instance of %s.%s' % (self._proxied.root().name,
|
||||
self._proxied.name)
|
||||
|
||||
def callable(self):
|
||||
try:
|
||||
self._proxied.getattr('__call__')
|
||||
return True
|
||||
except NotFoundError:
|
||||
return False
|
||||
|
||||
def pytype(self):
|
||||
return self._proxied.qname()
|
||||
|
||||
def display_type(self):
|
||||
return 'Instance of'
|
||||
|
||||
|
||||
class UnboundMethod(Proxy):
|
||||
"""a special node representing a method not bound to an instance"""
|
||||
def __repr__(self):
|
||||
frame = self._proxied.parent.frame()
|
||||
return '<%s %s of %s at 0x%s' % (self.__class__.__name__,
|
||||
self._proxied.name,
|
||||
frame.qname(), id(self))
|
||||
|
||||
def is_bound(self):
|
||||
return False
|
||||
|
||||
def getattr(self, name, context=None):
|
||||
if name == 'im_func':
|
||||
return [self._proxied]
|
||||
return super(UnboundMethod, self).getattr(name, context)
|
||||
|
||||
def igetattr(self, name, context=None):
|
||||
if name == 'im_func':
|
||||
return iter((self._proxied,))
|
||||
return super(UnboundMethod, self).igetattr(name, context)
|
||||
|
||||
def infer_call_result(self, caller, context):
|
||||
# If we're unbound method __new__ of builtin object, the result is an
|
||||
# instance of the class given as first argument.
|
||||
if (self._proxied.name == '__new__' and
|
||||
self._proxied.parent.frame().qname() == '%s.object' % BUILTINS):
|
||||
infer = caller.args[0].infer() if caller.args else []
|
||||
return ((x is YES and x or Instance(x)) for x in infer)
|
||||
return self._proxied.infer_call_result(caller, context)
|
||||
|
||||
|
||||
class BoundMethod(UnboundMethod):
|
||||
"""a special node representing a method bound to an instance"""
|
||||
def __init__(self, proxy, bound):
|
||||
UnboundMethod.__init__(self, proxy)
|
||||
self.bound = bound
|
||||
|
||||
def is_bound(self):
|
||||
return True
|
||||
|
||||
def infer_call_result(self, caller, context):
|
||||
context = context.clone()
|
||||
context.boundnode = self.bound
|
||||
return self._proxied.infer_call_result(caller, context)
|
||||
|
||||
|
||||
class Generator(Instance):
|
||||
"""a special node representing a generator.
|
||||
|
||||
Proxied class is set once for all in raw_building.
|
||||
"""
|
||||
def callable(self):
|
||||
return False
|
||||
|
||||
def pytype(self):
|
||||
return '%s.generator' % BUILTINS
|
||||
|
||||
def display_type(self):
|
||||
return 'Generator'
|
||||
|
||||
def __repr__(self):
|
||||
return '<Generator(%s) l.%s at 0x%s>' % (self._proxied.name, self.lineno, id(self))
|
||||
|
||||
def __str__(self):
|
||||
return 'Generator(%s)' % (self._proxied.name)
|
||||
|
||||
|
||||
# decorators ##################################################################
|
||||
|
||||
def path_wrapper(func):
|
||||
"""return the given infer function wrapped to handle the path"""
|
||||
def wrapped(node, context=None, _func=func, **kwargs):
|
||||
"""wrapper function handling context"""
|
||||
if context is None:
|
||||
context = InferenceContext()
|
||||
context.push(node)
|
||||
yielded = set()
|
||||
for res in _func(node, context, **kwargs):
|
||||
# unproxy only true instance, not const, tuple, dict...
|
||||
if res.__class__ is Instance:
|
||||
ares = res._proxied
|
||||
else:
|
||||
ares = res
|
||||
if not ares in yielded:
|
||||
yield res
|
||||
yielded.add(ares)
|
||||
return wrapped
|
||||
|
||||
def yes_if_nothing_infered(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
infered = False
|
||||
for node in func(*args, **kwargs):
|
||||
infered = True
|
||||
yield node
|
||||
if not infered:
|
||||
yield YES
|
||||
return wrapper
|
||||
|
||||
def raise_if_nothing_infered(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
infered = False
|
||||
for node in func(*args, **kwargs):
|
||||
infered = True
|
||||
yield node
|
||||
if not infered:
|
||||
raise InferenceError()
|
||||
return wrapper
|
||||
|
||||
|
||||
# Node ######################################################################
|
||||
|
||||
class NodeNG(object):
|
||||
"""Base Class for all Astroid node classes.
|
||||
|
||||
It represents a node of the new abstract syntax tree.
|
||||
"""
|
||||
is_statement = False
|
||||
optional_assign = False # True for For (and for Comprehension if py <3.0)
|
||||
is_function = False # True for Function nodes
|
||||
# attributes below are set by the builder module or by raw factories
|
||||
lineno = None
|
||||
fromlineno = None
|
||||
tolineno = None
|
||||
col_offset = None
|
||||
# parent node in the tree
|
||||
parent = None
|
||||
# attributes containing child node(s) redefined in most concrete classes:
|
||||
_astroid_fields = ()
|
||||
# instance specific inference function infer(node, context)
|
||||
_explicit_inference = None
|
||||
|
||||
def infer(self, context=None, **kwargs):
|
||||
"""main interface to the interface system, return a generator on infered
|
||||
values.
|
||||
|
||||
If the instance has some explicit inference function set, it will be
|
||||
called instead of the default interface.
|
||||
"""
|
||||
if self._explicit_inference is not None:
|
||||
# explicit_inference is not bound, give it self explicitly
|
||||
try:
|
||||
return self._explicit_inference(self, context, **kwargs)
|
||||
except UseInferenceDefault:
|
||||
pass
|
||||
|
||||
if not context:
|
||||
return self._infer(context, **kwargs)
|
||||
|
||||
key = (self, context.lookupname,
|
||||
context.callcontext, context.boundnode)
|
||||
if key in context.infered:
|
||||
return iter(context.infered[key])
|
||||
|
||||
return context.cache_generator(key, self._infer(context, **kwargs))
|
||||
|
||||
def _repr_name(self):
|
||||
"""return self.name or self.attrname or '' for nice representation"""
|
||||
return getattr(self, 'name', getattr(self, 'attrname', ''))
|
||||
|
||||
def __str__(self):
|
||||
return '%s(%s)' % (self.__class__.__name__, self._repr_name())
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s(%s) l.%s [%s] at 0x%x>' % (self.__class__.__name__,
|
||||
self._repr_name(),
|
||||
self.fromlineno,
|
||||
self.root().name,
|
||||
id(self))
|
||||
|
||||
|
||||
def accept(self, visitor):
|
||||
func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
|
||||
return func(self)
|
||||
|
||||
def get_children(self):
|
||||
for field in self._astroid_fields:
|
||||
attr = getattr(self, field)
|
||||
if attr is None:
|
||||
continue
|
||||
if isinstance(attr, (list, tuple)):
|
||||
for elt in attr:
|
||||
yield elt
|
||||
else:
|
||||
yield attr
|
||||
|
||||
def last_child(self):
|
||||
"""an optimized version of list(get_children())[-1]"""
|
||||
for field in self._astroid_fields[::-1]:
|
||||
attr = getattr(self, field)
|
||||
if not attr: # None or empty listy / tuple
|
||||
continue
|
||||
if attr.__class__ in (list, tuple):
|
||||
return attr[-1]
|
||||
else:
|
||||
return attr
|
||||
return None
|
||||
|
||||
def parent_of(self, node):
|
||||
"""return true if i'm a parent of the given node"""
|
||||
parent = node.parent
|
||||
while parent is not None:
|
||||
if self is parent:
|
||||
return True
|
||||
parent = parent.parent
|
||||
return False
|
||||
|
||||
def statement(self):
|
||||
"""return the first parent node marked as statement node"""
|
||||
if self.is_statement:
|
||||
return self
|
||||
return self.parent.statement()
|
||||
|
||||
def frame(self):
|
||||
"""return the first parent frame node (i.e. Module, Function or Class)
|
||||
"""
|
||||
return self.parent.frame()
|
||||
|
||||
def scope(self):
|
||||
"""return the first node defining a new scope (i.e. Module, Function,
|
||||
Class, Lambda but also GenExpr)
|
||||
"""
|
||||
return self.parent.scope()
|
||||
|
||||
def root(self):
|
||||
"""return the root node of the tree, (i.e. a Module)"""
|
||||
if self.parent:
|
||||
return self.parent.root()
|
||||
return self
|
||||
|
||||
def child_sequence(self, child):
|
||||
"""search for the right sequence where the child lies in"""
|
||||
for field in self._astroid_fields:
|
||||
node_or_sequence = getattr(self, field)
|
||||
if node_or_sequence is child:
|
||||
return [node_or_sequence]
|
||||
# /!\ compiler.ast Nodes have an __iter__ walking over child nodes
|
||||
if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
|
||||
return node_or_sequence
|
||||
else:
|
||||
msg = 'Could not find %s in %s\'s children'
|
||||
raise AstroidError(msg % (repr(child), repr(self)))
|
||||
|
||||
def locate_child(self, child):
|
||||
"""return a 2-uple (child attribute name, sequence or node)"""
|
||||
for field in self._astroid_fields:
|
||||
node_or_sequence = getattr(self, field)
|
||||
# /!\ compiler.ast Nodes have an __iter__ walking over child nodes
|
||||
if child is node_or_sequence:
|
||||
return field, child
|
||||
if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
|
||||
return field, node_or_sequence
|
||||
msg = 'Could not find %s in %s\'s children'
|
||||
raise AstroidError(msg % (repr(child), repr(self)))
|
||||
# FIXME : should we merge child_sequence and locate_child ? locate_child
|
||||
# is only used in are_exclusive, child_sequence one time in pylint.
|
||||
|
||||
def next_sibling(self):
|
||||
"""return the next sibling statement"""
|
||||
return self.parent.next_sibling()
|
||||
|
||||
def previous_sibling(self):
|
||||
"""return the previous sibling statement"""
|
||||
return self.parent.previous_sibling()
|
||||
|
||||
def nearest(self, nodes):
|
||||
"""return the node which is the nearest before this one in the
|
||||
given list of nodes
|
||||
"""
|
||||
myroot = self.root()
|
||||
mylineno = self.fromlineno
|
||||
nearest = None, 0
|
||||
for node in nodes:
|
||||
assert node.root() is myroot, \
|
||||
'nodes %s and %s are not from the same module' % (self, node)
|
||||
lineno = node.fromlineno
|
||||
if node.fromlineno > mylineno:
|
||||
break
|
||||
if lineno > nearest[1]:
|
||||
nearest = node, lineno
|
||||
# FIXME: raise an exception if nearest is None ?
|
||||
return nearest[0]
|
||||
|
||||
# these are lazy because they're relatively expensive to compute for every
|
||||
# single node, and they rarely get looked at
|
||||
|
||||
@cachedproperty
|
||||
def fromlineno(self):
|
||||
if self.lineno is None:
|
||||
return self._fixed_source_line()
|
||||
else:
|
||||
return self.lineno
|
||||
|
||||
@cachedproperty
|
||||
def tolineno(self):
|
||||
if not self._astroid_fields:
|
||||
# can't have children
|
||||
lastchild = None
|
||||
else:
|
||||
lastchild = self.last_child()
|
||||
if lastchild is None:
|
||||
return self.fromlineno
|
||||
else:
|
||||
return lastchild.tolineno
|
||||
|
||||
# TODO / FIXME:
|
||||
assert self.fromlineno is not None, self
|
||||
assert self.tolineno is not None, self
|
||||
|
||||
def _fixed_source_line(self):
|
||||
"""return the line number where the given node appears
|
||||
|
||||
we need this method since not all nodes have the lineno attribute
|
||||
correctly set...
|
||||
"""
|
||||
line = self.lineno
|
||||
_node = self
|
||||
try:
|
||||
while line is None:
|
||||
_node = next(_node.get_children())
|
||||
line = _node.lineno
|
||||
except StopIteration:
|
||||
_node = self.parent
|
||||
while _node and line is None:
|
||||
line = _node.lineno
|
||||
_node = _node.parent
|
||||
return line
|
||||
|
||||
def block_range(self, lineno):
|
||||
"""handle block line numbers range for non block opening statements
|
||||
"""
|
||||
return lineno, self.tolineno
|
||||
|
||||
def set_local(self, name, stmt):
|
||||
"""delegate to a scoped parent handling a locals dictionary"""
|
||||
self.parent.set_local(name, stmt)
|
||||
|
||||
def nodes_of_class(self, klass, skip_klass=None):
|
||||
"""return an iterator on nodes which are instance of the given class(es)
|
||||
|
||||
klass may be a class object or a tuple of class objects
|
||||
"""
|
||||
if isinstance(self, klass):
|
||||
yield self
|
||||
for child_node in self.get_children():
|
||||
if skip_klass is not None and isinstance(child_node, skip_klass):
|
||||
continue
|
||||
for matching in child_node.nodes_of_class(klass, skip_klass):
|
||||
yield matching
|
||||
|
||||
def _infer_name(self, frame, name):
|
||||
# overridden for From, Import, Global, TryExcept and Arguments
|
||||
return None
|
||||
|
||||
def _infer(self, context=None):
|
||||
"""we don't know how to resolve a statement by default"""
|
||||
# this method is overridden by most concrete classes
|
||||
raise InferenceError(self.__class__.__name__)
|
||||
|
||||
def infered(self):
|
||||
'''return list of infered values for a more simple inference usage'''
|
||||
return list(self.infer())
|
||||
|
||||
def instanciate_class(self):
|
||||
"""instanciate a node if it is a Class node, else return self"""
|
||||
return self
|
||||
|
||||
def has_base(self, node):
|
||||
return False
|
||||
|
||||
def callable(self):
|
||||
return False
|
||||
|
||||
def eq(self, value):
|
||||
return False
|
||||
|
||||
def as_string(self):
|
||||
from astroid.as_string import to_code
|
||||
return to_code(self)
|
||||
|
||||
def repr_tree(self, ids=False):
|
||||
from astroid.as_string import dump
|
||||
return dump(self)
|
||||
|
||||
|
||||
class Statement(NodeNG):
|
||||
"""Statement node adding a few attributes"""
|
||||
is_statement = True
|
||||
|
||||
def next_sibling(self):
|
||||
"""return the next sibling statement"""
|
||||
stmts = self.parent.child_sequence(self)
|
||||
index = stmts.index(self)
|
||||
try:
|
||||
return stmts[index +1]
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
def previous_sibling(self):
|
||||
"""return the previous sibling statement"""
|
||||
stmts = self.parent.child_sequence(self)
|
||||
index = stmts.index(self)
|
||||
if index >= 1:
|
||||
return stmts[index -1]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -1,245 +0,0 @@
|
|||
"""Astroid hooks for various builtins."""
|
||||
|
||||
import sys
|
||||
from functools import partial
|
||||
from textwrap import dedent
|
||||
|
||||
import six
|
||||
from astroid import (MANAGER, UseInferenceDefault,
|
||||
inference_tip, YES, InferenceError, UnresolvableName)
|
||||
from astroid import nodes
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def _extend_str(class_node, rvalue):
|
||||
"""function to extend builtin str/unicode class"""
|
||||
# TODO(cpopa): this approach will make astroid to believe
|
||||
# that some arguments can be passed by keyword, but
|
||||
# unfortunately, strings and bytes don't accept keyword arguments.
|
||||
code = dedent('''
|
||||
class whatever(object):
|
||||
def join(self, iterable):
|
||||
return {rvalue}
|
||||
def replace(self, old, new, count=None):
|
||||
return {rvalue}
|
||||
def format(self, *args, **kwargs):
|
||||
return {rvalue}
|
||||
def encode(self, encoding='ascii', errors=None):
|
||||
return ''
|
||||
def decode(self, encoding='ascii', errors=None):
|
||||
return u''
|
||||
def capitalize(self):
|
||||
return {rvalue}
|
||||
def title(self):
|
||||
return {rvalue}
|
||||
def lower(self):
|
||||
return {rvalue}
|
||||
def upper(self):
|
||||
return {rvalue}
|
||||
def swapcase(self):
|
||||
return {rvalue}
|
||||
def index(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def find(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def count(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def strip(self, chars=None):
|
||||
return {rvalue}
|
||||
def lstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rjust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def center(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def ljust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
''')
|
||||
code = code.format(rvalue=rvalue)
|
||||
fake = AstroidBuilder(MANAGER).string_build(code)['whatever']
|
||||
for method in fake.mymethods():
|
||||
class_node.locals[method.name] = [method]
|
||||
method.parent = class_node
|
||||
|
||||
def extend_builtins(class_transforms):
|
||||
from astroid.bases import BUILTINS
|
||||
builtin_ast = MANAGER.astroid_cache[BUILTINS]
|
||||
for class_name, transform in class_transforms.items():
|
||||
transform(builtin_ast[class_name])
|
||||
|
||||
if sys.version_info > (3, 0):
|
||||
extend_builtins({'bytes': partial(_extend_str, rvalue="b''"),
|
||||
'str': partial(_extend_str, rvalue="''")})
|
||||
else:
|
||||
extend_builtins({'str': partial(_extend_str, rvalue="''"),
|
||||
'unicode': partial(_extend_str, rvalue="u''")})
|
||||
|
||||
|
||||
def register_builtin_transform(transform, builtin_name):
|
||||
"""Register a new transform function for the given *builtin_name*.
|
||||
|
||||
The transform function must accept two parameters, a node and
|
||||
an optional context.
|
||||
"""
|
||||
def _transform_wrapper(node, context=None):
|
||||
result = transform(node, context=context)
|
||||
if result:
|
||||
result.parent = node
|
||||
result.lineno = node.lineno
|
||||
result.col_offset = node.col_offset
|
||||
return iter([result])
|
||||
|
||||
MANAGER.register_transform(nodes.CallFunc,
|
||||
inference_tip(_transform_wrapper),
|
||||
lambda n: (isinstance(n.func, nodes.Name) and
|
||||
n.func.name == builtin_name))
|
||||
|
||||
|
||||
def _generic_inference(node, context, node_type, transform):
|
||||
args = node.args
|
||||
if not args:
|
||||
return node_type()
|
||||
if len(node.args) > 1:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
arg, = args
|
||||
transformed = transform(arg)
|
||||
if not transformed:
|
||||
try:
|
||||
infered = next(arg.infer(context=context))
|
||||
except (InferenceError, StopIteration):
|
||||
raise UseInferenceDefault()
|
||||
if infered is YES:
|
||||
raise UseInferenceDefault()
|
||||
transformed = transform(infered)
|
||||
if not transformed or transformed is YES:
|
||||
raise UseInferenceDefault()
|
||||
return transformed
|
||||
|
||||
|
||||
def _generic_transform(arg, klass, iterables, build_elts):
|
||||
if isinstance(arg, klass):
|
||||
return arg
|
||||
elif isinstance(arg, iterables):
|
||||
if not all(isinstance(elt, nodes.Const)
|
||||
for elt in arg.elts):
|
||||
# TODO(cpopa): Don't support heterogenous elements.
|
||||
# Not yet, though.
|
||||
raise UseInferenceDefault()
|
||||
elts = [elt.value for elt in arg.elts]
|
||||
elif isinstance(arg, nodes.Dict):
|
||||
if not all(isinstance(elt[0], nodes.Const)
|
||||
for elt in arg.items):
|
||||
raise UseInferenceDefault()
|
||||
elts = [item[0].value for item in arg.items]
|
||||
elif (isinstance(arg, nodes.Const) and
|
||||
isinstance(arg.value, (six.string_types, six.binary_type))):
|
||||
elts = arg.value
|
||||
else:
|
||||
return
|
||||
return klass(elts=build_elts(elts))
|
||||
|
||||
|
||||
def _infer_builtin(node, context,
|
||||
klass=None, iterables=None,
|
||||
build_elts=None):
|
||||
transform_func = partial(
|
||||
_generic_transform,
|
||||
klass=klass,
|
||||
iterables=iterables,
|
||||
build_elts=build_elts)
|
||||
|
||||
return _generic_inference(node, context, klass, transform_func)
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
infer_tuple = partial(
|
||||
_infer_builtin,
|
||||
klass=nodes.Tuple,
|
||||
iterables=(nodes.List, nodes.Set),
|
||||
build_elts=tuple)
|
||||
|
||||
infer_list = partial(
|
||||
_infer_builtin,
|
||||
klass=nodes.List,
|
||||
iterables=(nodes.Tuple, nodes.Set),
|
||||
build_elts=list)
|
||||
|
||||
infer_set = partial(
|
||||
_infer_builtin,
|
||||
klass=nodes.Set,
|
||||
iterables=(nodes.List, nodes.Tuple),
|
||||
build_elts=set)
|
||||
|
||||
|
||||
def _get_elts(arg, context):
|
||||
is_iterable = lambda n: isinstance(n,
|
||||
(nodes.List, nodes.Tuple, nodes.Set))
|
||||
try:
|
||||
infered = next(arg.infer(context))
|
||||
except (InferenceError, UnresolvableName):
|
||||
raise UseInferenceDefault()
|
||||
if isinstance(infered, nodes.Dict):
|
||||
items = infered.items
|
||||
elif is_iterable(infered):
|
||||
items = []
|
||||
for elt in infered.elts:
|
||||
# If an item is not a pair of two items,
|
||||
# then fallback to the default inference.
|
||||
# Also, take in consideration only hashable items,
|
||||
# tuples and consts. We are choosing Names as well.
|
||||
if not is_iterable(elt):
|
||||
raise UseInferenceDefault()
|
||||
if len(elt.elts) != 2:
|
||||
raise UseInferenceDefault()
|
||||
if not isinstance(elt.elts[0],
|
||||
(nodes.Tuple, nodes.Const, nodes.Name)):
|
||||
raise UseInferenceDefault()
|
||||
items.append(tuple(elt.elts))
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
return items
|
||||
|
||||
def infer_dict(node, context=None):
|
||||
"""Try to infer a dict call to a Dict node.
|
||||
|
||||
The function treats the following cases:
|
||||
|
||||
* dict()
|
||||
* dict(mapping)
|
||||
* dict(iterable)
|
||||
* dict(iterable, **kwargs)
|
||||
* dict(mapping, **kwargs)
|
||||
* dict(**kwargs)
|
||||
|
||||
If a case can't be infered, we'll fallback to default inference.
|
||||
"""
|
||||
has_keywords = lambda args: all(isinstance(arg, nodes.Keyword)
|
||||
for arg in args)
|
||||
if not node.args and not node.kwargs:
|
||||
# dict()
|
||||
return nodes.Dict()
|
||||
elif has_keywords(node.args) and node.args:
|
||||
# dict(a=1, b=2, c=4)
|
||||
items = [(nodes.Const(arg.arg), arg.value) for arg in node.args]
|
||||
elif (len(node.args) >= 2 and
|
||||
has_keywords(node.args[1:])):
|
||||
# dict(some_iterable, b=2, c=4)
|
||||
elts = _get_elts(node.args[0], context)
|
||||
keys = [(nodes.Const(arg.arg), arg.value) for arg in node.args[1:]]
|
||||
items = elts + keys
|
||||
elif len(node.args) == 1:
|
||||
items = _get_elts(node.args[0], context)
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
empty = nodes.Dict()
|
||||
empty.items = items
|
||||
return empty
|
||||
|
||||
# Builtins inference
|
||||
register_builtin_transform(infer_tuple, 'tuple')
|
||||
register_builtin_transform(infer_set, 'set')
|
||||
register_builtin_transform(infer_list, 'list')
|
||||
register_builtin_transform(infer_dict, 'dict')
|
||||
|
|
@ -1,155 +0,0 @@
|
|||
"""Astroid hooks for the Python 2 GObject introspection bindings.
|
||||
|
||||
Helps with understanding everything imported from 'gi.repository'
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import itertools
|
||||
import sys
|
||||
import re
|
||||
|
||||
from astroid import MANAGER, AstroidBuildingException
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
_inspected_modules = {}
|
||||
|
||||
_identifier_re = r'^[A-Za-z_]\w*$'
|
||||
|
||||
def _gi_build_stub(parent):
|
||||
"""
|
||||
Inspect the passed module recursively and build stubs for functions,
|
||||
classes, etc.
|
||||
"""
|
||||
classes = {}
|
||||
functions = {}
|
||||
constants = {}
|
||||
methods = {}
|
||||
for name in dir(parent):
|
||||
if name.startswith("__"):
|
||||
continue
|
||||
|
||||
# Check if this is a valid name in python
|
||||
if not re.match(_identifier_re, name):
|
||||
continue
|
||||
|
||||
try:
|
||||
obj = getattr(parent, name)
|
||||
except:
|
||||
continue
|
||||
|
||||
if inspect.isclass(obj):
|
||||
classes[name] = obj
|
||||
elif (inspect.isfunction(obj) or
|
||||
inspect.isbuiltin(obj)):
|
||||
functions[name] = obj
|
||||
elif (inspect.ismethod(obj) or
|
||||
inspect.ismethoddescriptor(obj)):
|
||||
methods[name] = obj
|
||||
elif type(obj) in [int, str]:
|
||||
constants[name] = obj
|
||||
elif (str(obj).startswith("<flags") or
|
||||
str(obj).startswith("<enum ") or
|
||||
str(obj).startswith("<GType ") or
|
||||
inspect.isdatadescriptor(obj)):
|
||||
constants[name] = 0
|
||||
elif callable(obj):
|
||||
# Fall back to a function for anything callable
|
||||
functions[name] = obj
|
||||
else:
|
||||
# Assume everything else is some manner of constant
|
||||
constants[name] = 0
|
||||
|
||||
ret = ""
|
||||
|
||||
if constants:
|
||||
ret += "# %s contants\n\n" % parent.__name__
|
||||
for name in sorted(constants):
|
||||
if name[0].isdigit():
|
||||
# GDK has some busted constant names like
|
||||
# Gdk.EventType.2BUTTON_PRESS
|
||||
continue
|
||||
|
||||
val = constants[name]
|
||||
|
||||
strval = str(val)
|
||||
if type(val) is str:
|
||||
strval = '"%s"' % str(val).replace("\\", "\\\\")
|
||||
ret += "%s = %s\n" % (name, strval)
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if functions:
|
||||
ret += "# %s functions\n\n" % parent.__name__
|
||||
for name in sorted(functions):
|
||||
func = functions[name]
|
||||
ret += "def %s(*args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if methods:
|
||||
ret += "# %s methods\n\n" % parent.__name__
|
||||
for name in sorted(methods):
|
||||
func = methods[name]
|
||||
ret += "def %s(self, *args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if classes:
|
||||
ret += "# %s classes\n\n" % parent.__name__
|
||||
for name in sorted(classes):
|
||||
ret += "class %s(object):\n" % name
|
||||
|
||||
classret = _gi_build_stub(classes[name])
|
||||
if not classret:
|
||||
classret = "pass\n"
|
||||
|
||||
for line in classret.splitlines():
|
||||
ret += " " + line + "\n"
|
||||
ret += "\n"
|
||||
|
||||
return ret
|
||||
|
||||
def _import_gi_module(modname):
|
||||
# we only consider gi.repository submodules
|
||||
if not modname.startswith('gi.repository.'):
|
||||
raise AstroidBuildingException()
|
||||
# build astroid representation unless we already tried so
|
||||
if modname not in _inspected_modules:
|
||||
modnames = [modname]
|
||||
optional_modnames = []
|
||||
|
||||
# GLib and GObject may have some special case handling
|
||||
# in pygobject that we need to cope with. However at
|
||||
# least as of pygobject3-3.13.91 the _glib module doesn't
|
||||
# exist anymore, so if treat these modules as optional.
|
||||
if modname == 'gi.repository.GLib':
|
||||
optional_modnames.append('gi._glib')
|
||||
elif modname == 'gi.repository.GObject':
|
||||
optional_modnames.append('gi._gobject')
|
||||
|
||||
try:
|
||||
modcode = ''
|
||||
for m in itertools.chain(modnames, optional_modnames):
|
||||
try:
|
||||
__import__(m)
|
||||
modcode += _gi_build_stub(sys.modules[m])
|
||||
except ImportError:
|
||||
if m not in optional_modnames:
|
||||
raise
|
||||
except ImportError:
|
||||
astng = _inspected_modules[modname] = None
|
||||
else:
|
||||
astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
|
||||
_inspected_modules[modname] = astng
|
||||
else:
|
||||
astng = _inspected_modules[modname]
|
||||
if astng is None:
|
||||
raise AstroidBuildingException('Failed to import module %r' % modname)
|
||||
return astng
|
||||
|
||||
|
||||
MANAGER.register_failed_import_hook(_import_gi_module)
|
||||
|
||||
|
|
@ -1,18 +0,0 @@
|
|||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
def mechanize_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
class Browser(object):
|
||||
def open(self, url, data=None, timeout=None):
|
||||
return None
|
||||
def open_novisit(self, url, data=None, timeout=None):
|
||||
return None
|
||||
def open_local_file(self, filename):
|
||||
return None
|
||||
|
||||
''')
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'mechanize', mechanize_transform)
|
||||
|
|
@ -1,31 +0,0 @@
|
|||
"""Astroid hooks for pytest."""
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def pytest_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
try:
|
||||
import _pytest.mark
|
||||
import _pytest.recwarn
|
||||
import _pytest.runner
|
||||
import _pytest.python
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
deprecated_call = _pytest.recwarn.deprecated_call
|
||||
exit = _pytest.runner.exit
|
||||
fail = _pytest.runner.fail
|
||||
fixture = _pytest.python.fixture
|
||||
importorskip = _pytest.runner.importorskip
|
||||
mark = _pytest.mark.MarkGenerator()
|
||||
raises = _pytest.python.raises
|
||||
skip = _pytest.runner.skip
|
||||
yield_fixture = _pytest.python.yield_fixture
|
||||
|
||||
''')
|
||||
|
||||
register_module_extender(MANAGER, 'pytest', pytest_transform)
|
||||
register_module_extender(MANAGER, 'py.test', pytest_transform)
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
"""Astroid hooks for the Python 2 qt4 module.
|
||||
|
||||
Currently help understanding of :
|
||||
|
||||
* PyQT4.QtCore
|
||||
"""
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def pyqt4_qtcore_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
def SIGNAL(signal_name): pass
|
||||
|
||||
class QObject(object):
|
||||
def emit(self, signal): pass
|
||||
''')
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform)
|
||||
|
|
@ -1,334 +0,0 @@
|
|||
|
||||
"""Astroid hooks for the Python 2 standard library.
|
||||
|
||||
Currently help understanding of :
|
||||
|
||||
* hashlib.md5 and hashlib.sha1
|
||||
"""
|
||||
|
||||
import sys
|
||||
from functools import partial
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import (
|
||||
MANAGER, AsStringRegexpPredicate,
|
||||
UseInferenceDefault, inference_tip,
|
||||
YES, InferenceError, register_module_extender)
|
||||
from astroid import exceptions
|
||||
from astroid import nodes
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
PY3K = sys.version_info > (3, 0)
|
||||
PY33 = sys.version_info >= (3, 3)
|
||||
|
||||
# general function
|
||||
|
||||
def infer_func_form(node, base_type, context=None, enum=False):
|
||||
"""Specific inference function for namedtuple or Python 3 enum. """
|
||||
def infer_first(node):
|
||||
try:
|
||||
value = next(node.infer(context=context))
|
||||
if value is YES:
|
||||
raise UseInferenceDefault()
|
||||
else:
|
||||
return value
|
||||
except StopIteration:
|
||||
raise InferenceError()
|
||||
|
||||
# node is a CallFunc node, class name as first argument and generated class
|
||||
# attributes as second argument
|
||||
if len(node.args) != 2:
|
||||
# something weird here, go back to class implementation
|
||||
raise UseInferenceDefault()
|
||||
# namedtuple or enums list of attributes can be a list of strings or a
|
||||
# whitespace-separate string
|
||||
try:
|
||||
name = infer_first(node.args[0]).value
|
||||
names = infer_first(node.args[1])
|
||||
try:
|
||||
attributes = names.value.replace(',', ' ').split()
|
||||
except AttributeError:
|
||||
if not enum:
|
||||
attributes = [infer_first(const).value for const in names.elts]
|
||||
else:
|
||||
# Enums supports either iterator of (name, value) pairs
|
||||
# or mappings.
|
||||
# TODO: support only list, tuples and mappings.
|
||||
if hasattr(names, 'items') and isinstance(names.items, list):
|
||||
attributes = [infer_first(const[0]).value
|
||||
for const in names.items
|
||||
if isinstance(const[0], nodes.Const)]
|
||||
elif hasattr(names, 'elts'):
|
||||
# Enums can support either ["a", "b", "c"]
|
||||
# or [("a", 1), ("b", 2), ...], but they can't
|
||||
# be mixed.
|
||||
if all(isinstance(const, nodes.Tuple)
|
||||
for const in names.elts):
|
||||
attributes = [infer_first(const.elts[0]).value
|
||||
for const in names.elts
|
||||
if isinstance(const, nodes.Tuple)]
|
||||
else:
|
||||
attributes = [infer_first(const).value
|
||||
for const in names.elts]
|
||||
else:
|
||||
raise AttributeError
|
||||
if not attributes:
|
||||
raise AttributeError
|
||||
except (AttributeError, exceptions.InferenceError) as exc:
|
||||
raise UseInferenceDefault()
|
||||
# we want to return a Class node instance with proper attributes set
|
||||
class_node = nodes.Class(name, 'docstring')
|
||||
class_node.parent = node.parent
|
||||
# set base class=tuple
|
||||
class_node.bases.append(base_type)
|
||||
# XXX add __init__(*attributes) method
|
||||
for attr in attributes:
|
||||
fake_node = nodes.EmptyNode()
|
||||
fake_node.parent = class_node
|
||||
class_node.instance_attrs[attr] = [fake_node]
|
||||
return class_node, name, attributes
|
||||
|
||||
|
||||
# module specific transformation functions #####################################
|
||||
|
||||
def hashlib_transform():
|
||||
template = '''
|
||||
|
||||
class %(name)s(object):
|
||||
def __init__(self, value=''): pass
|
||||
def digest(self):
|
||||
return %(digest)s
|
||||
def copy(self):
|
||||
return self
|
||||
def update(self, value): pass
|
||||
def hexdigest(self):
|
||||
return ''
|
||||
@property
|
||||
def name(self):
|
||||
return %(name)r
|
||||
@property
|
||||
def block_size(self):
|
||||
return 1
|
||||
@property
|
||||
def digest_size(self):
|
||||
return 1
|
||||
'''
|
||||
algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
|
||||
classes = "".join(
|
||||
template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'}
|
||||
for hashfunc in algorithms)
|
||||
return AstroidBuilder(MANAGER).string_build(classes)
|
||||
|
||||
|
||||
def collections_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
class defaultdict(dict):
|
||||
default_factory = None
|
||||
def __missing__(self, key): pass
|
||||
|
||||
class deque(object):
|
||||
maxlen = 0
|
||||
def __init__(self, iterable=None, maxlen=None): pass
|
||||
def append(self, x): pass
|
||||
def appendleft(self, x): pass
|
||||
def clear(self): pass
|
||||
def count(self, x): return 0
|
||||
def extend(self, iterable): pass
|
||||
def extendleft(self, iterable): pass
|
||||
def pop(self): pass
|
||||
def popleft(self): pass
|
||||
def remove(self, value): pass
|
||||
def reverse(self): pass
|
||||
def rotate(self, n): pass
|
||||
def __iter__(self): return self
|
||||
|
||||
''')
|
||||
|
||||
|
||||
def pkg_resources_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
def resource_exists(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_isdir(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_filename(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_stream(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_string(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def resource_listdir(package_or_requirement, resource_name):
|
||||
pass
|
||||
|
||||
def extraction_error():
|
||||
pass
|
||||
|
||||
def get_cache_path(archive_name, names=()):
|
||||
pass
|
||||
|
||||
def postprocess(tempname, filename):
|
||||
pass
|
||||
|
||||
def set_extraction_path(path):
|
||||
pass
|
||||
|
||||
def cleanup_resources(force=False):
|
||||
pass
|
||||
|
||||
''')
|
||||
|
||||
|
||||
def subprocess_transform():
|
||||
if PY3K:
|
||||
communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0, restore_signals=True,
|
||||
start_new_session=False, pass_fds=()):
|
||||
pass
|
||||
"""
|
||||
else:
|
||||
communicate = ('string', 'string')
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0):
|
||||
pass
|
||||
"""
|
||||
if PY33:
|
||||
wait_signature = 'def wait(self, timeout=None)'
|
||||
else:
|
||||
wait_signature = 'def wait(self)'
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
class Popen(object):
|
||||
returncode = pid = 0
|
||||
stdin = stdout = stderr = file()
|
||||
|
||||
%(init)s
|
||||
|
||||
def communicate(self, input=None):
|
||||
return %(communicate)r
|
||||
%(wait_signature)s:
|
||||
return self.returncode
|
||||
def poll(self):
|
||||
return self.returncode
|
||||
def send_signal(self, signal):
|
||||
pass
|
||||
def terminate(self):
|
||||
pass
|
||||
def kill(self):
|
||||
pass
|
||||
''' % {'init': init,
|
||||
'communicate': communicate,
|
||||
'wait_signature': wait_signature})
|
||||
|
||||
|
||||
# namedtuple support ###########################################################
|
||||
|
||||
def looks_like_namedtuple(node):
|
||||
func = node.func
|
||||
if type(func) is nodes.Getattr:
|
||||
return func.attrname == 'namedtuple'
|
||||
if type(func) is nodes.Name:
|
||||
return func.name == 'namedtuple'
|
||||
return False
|
||||
|
||||
def infer_named_tuple(node, context=None):
|
||||
"""Specific inference function for namedtuple CallFunc node"""
|
||||
class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
|
||||
context=context)
|
||||
fake = AstroidBuilder(MANAGER).string_build('''
|
||||
class %(name)s(tuple):
|
||||
_fields = %(fields)r
|
||||
def _asdict(self):
|
||||
return self.__dict__
|
||||
@classmethod
|
||||
def _make(cls, iterable, new=tuple.__new__, len=len):
|
||||
return new(cls, iterable)
|
||||
def _replace(_self, **kwds):
|
||||
result = _self._make(map(kwds.pop, %(fields)r, _self))
|
||||
if kwds:
|
||||
raise ValueError('Got unexpected field names: %%r' %% list(kwds))
|
||||
return result
|
||||
''' % {'name': name, 'fields': attributes})
|
||||
class_node.locals['_asdict'] = fake.body[0].locals['_asdict']
|
||||
class_node.locals['_make'] = fake.body[0].locals['_make']
|
||||
class_node.locals['_replace'] = fake.body[0].locals['_replace']
|
||||
class_node.locals['_fields'] = fake.body[0].locals['_fields']
|
||||
# we use UseInferenceDefault, we can't be a generator so return an iterator
|
||||
return iter([class_node])
|
||||
|
||||
def infer_enum(node, context=None):
|
||||
""" Specific inference function for enum CallFunc node. """
|
||||
enum_meta = nodes.Class("EnumMeta", 'docstring')
|
||||
class_node = infer_func_form(node, enum_meta,
|
||||
context=context, enum=True)[0]
|
||||
return iter([class_node.instanciate_class()])
|
||||
|
||||
def infer_enum_class(node):
|
||||
""" Specific inference for enums. """
|
||||
names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
|
||||
for basename in node.basenames:
|
||||
# TODO: doesn't handle subclasses yet. This implementation
|
||||
# is a hack to support enums.
|
||||
if basename not in names:
|
||||
continue
|
||||
if node.root().name == 'enum':
|
||||
# Skip if the class is directly from enum module.
|
||||
break
|
||||
for local, values in node.locals.items():
|
||||
if any(not isinstance(value, nodes.AssName)
|
||||
for value in values):
|
||||
continue
|
||||
|
||||
stmt = values[0].statement()
|
||||
if isinstance(stmt.targets[0], nodes.Tuple):
|
||||
targets = stmt.targets[0].itered()
|
||||
else:
|
||||
targets = stmt.targets
|
||||
|
||||
new_targets = []
|
||||
for target in targets:
|
||||
# Replace all the assignments with our mocked class.
|
||||
classdef = dedent('''
|
||||
class %(name)s(object):
|
||||
@property
|
||||
def value(self):
|
||||
# Not the best return.
|
||||
return None
|
||||
@property
|
||||
def name(self):
|
||||
return %(name)r
|
||||
''' % {'name': target.name})
|
||||
fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
|
||||
fake.parent = target.parent
|
||||
for method in node.mymethods():
|
||||
fake.locals[method.name] = [method]
|
||||
new_targets.append(fake.instanciate_class())
|
||||
node.locals[local] = new_targets
|
||||
break
|
||||
return node
|
||||
|
||||
|
||||
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
|
||||
looks_like_namedtuple)
|
||||
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum),
|
||||
AsStringRegexpPredicate('Enum', 'func'))
|
||||
MANAGER.register_transform(nodes.Class, infer_enum_class)
|
||||
register_module_extender(MANAGER, 'hashlib', hashlib_transform)
|
||||
register_module_extender(MANAGER, 'collections', collections_transform)
|
||||
register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform)
|
||||
register_module_extender(MANAGER, 'subprocess', subprocess_transform)
|
||||
|
|
@ -1,79 +0,0 @@
|
|||
# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""Hooks for nose library."""
|
||||
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
import astroid
|
||||
import astroid.builder
|
||||
|
||||
_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER)
|
||||
|
||||
|
||||
def _pep8(name, caps=re.compile('([A-Z])')):
|
||||
return caps.sub(lambda m: '_' + m.groups()[0].lower(), name)
|
||||
|
||||
|
||||
def _nose_tools_functions():
|
||||
"""Get an iterator of names and bound methods."""
|
||||
module = _BUILDER.string_build(textwrap.dedent('''
|
||||
import unittest
|
||||
|
||||
class Test(unittest.TestCase):
|
||||
pass
|
||||
a = Test()
|
||||
'''))
|
||||
try:
|
||||
case = next(module['a'].infer())
|
||||
except astroid.InferenceError:
|
||||
return
|
||||
for method in case.methods():
|
||||
if method.name.startswith('assert') and '_' not in method.name:
|
||||
pep8_name = _pep8(method.name)
|
||||
yield pep8_name, astroid.BoundMethod(method, case)
|
||||
|
||||
|
||||
def _nose_tools_transform(node):
|
||||
for method_name, method in _nose_tools_functions():
|
||||
node.locals[method_name] = [method]
|
||||
|
||||
|
||||
def _nose_tools_trivial_transform():
|
||||
"""Custom transform for the nose.tools module."""
|
||||
stub = _BUILDER.string_build('''__all__ = []''')
|
||||
all_entries = ['ok_', 'eq_']
|
||||
|
||||
for pep8_name, method in _nose_tools_functions():
|
||||
all_entries.append(pep8_name)
|
||||
stub[pep8_name] = method
|
||||
|
||||
# Update the __all__ variable, since nose.tools
|
||||
# does this manually with .append.
|
||||
all_assign = stub['__all__'].parent
|
||||
all_object = astroid.List(all_entries)
|
||||
all_object.parent = all_assign
|
||||
all_assign.value = all_object
|
||||
return stub
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'nose.tools.trivial',
|
||||
_nose_tools_trivial_transform)
|
||||
astroid.MANAGER.register_transform(astroid.Module, _nose_tools_transform,
|
||||
lambda n: n.name == 'nose.tools')
|
||||
|
|
@ -1,261 +0,0 @@
|
|||
# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""Astroid hooks for six.moves."""
|
||||
|
||||
import sys
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.exceptions import AstroidBuildingException
|
||||
|
||||
def _indent(text, prefix, predicate=None):
|
||||
"""Adds 'prefix' to the beginning of selected lines in 'text'.
|
||||
|
||||
If 'predicate' is provided, 'prefix' will only be added to the lines
|
||||
where 'predicate(line)' is True. If 'predicate' is not provided,
|
||||
it will default to adding 'prefix' to all non-empty lines that do not
|
||||
consist solely of whitespace characters.
|
||||
"""
|
||||
if predicate is None:
|
||||
predicate = lambda line: line.strip()
|
||||
|
||||
def prefixed_lines():
|
||||
for line in text.splitlines(True):
|
||||
yield prefix + line if predicate(line) else line
|
||||
return ''.join(prefixed_lines())
|
||||
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
_IMPORTS_2 = """
|
||||
import BaseHTTPServer
|
||||
import CGIHTTPServer
|
||||
import SimpleHTTPServer
|
||||
|
||||
from StringIO import StringIO
|
||||
from cStringIO import StringIO as cStringIO
|
||||
from UserDict import UserDict
|
||||
from UserList import UserList
|
||||
from UserString import UserString
|
||||
|
||||
import __builtin__ as builtins
|
||||
import thread as _thread
|
||||
import dummy_thread as _dummy_thread
|
||||
import ConfigParser as configparser
|
||||
import copy_reg as copyreg
|
||||
from itertools import (imap as map,
|
||||
ifilter as filter,
|
||||
ifilterfalse as filterfalse,
|
||||
izip_longest as zip_longest,
|
||||
izip as zip)
|
||||
import htmlentitydefs as html_entities
|
||||
import HTMLParser as html_parser
|
||||
import httplib as http_client
|
||||
import cookielib as http_cookiejar
|
||||
import Cookie as http_cookies
|
||||
import Queue as queue
|
||||
import repr as reprlib
|
||||
from pipes import quote as shlex_quote
|
||||
import SocketServer as socketserver
|
||||
import SimpleXMLRPCServer as xmlrpc_server
|
||||
import xmlrpclib as xmlrpc_client
|
||||
import _winreg as winreg
|
||||
import robotparser as urllib_robotparser
|
||||
import Tkinter as tkinter
|
||||
import tkFileDialog as tkinter_tkfiledialog
|
||||
|
||||
input = raw_input
|
||||
intern = intern
|
||||
range = xrange
|
||||
xrange = xrange
|
||||
reduce = reduce
|
||||
reload_module = reload
|
||||
|
||||
class UrllibParse(object):
|
||||
import urlparse as _urlparse
|
||||
import urllib as _urllib
|
||||
ParseResult = _urlparse.ParseResult
|
||||
SplitResult = _urlparse.SplitResult
|
||||
parse_qs = _urlparse.parse_qs
|
||||
parse_qsl = _urlparse.parse_qsl
|
||||
urldefrag = _urlparse.urldefrag
|
||||
urljoin = _urlparse.urljoin
|
||||
urlparse = _urlparse.urlparse
|
||||
urlsplit = _urlparse.urlsplit
|
||||
urlunparse = _urlparse.urlunparse
|
||||
urlunsplit = _urlparse.urlunsplit
|
||||
quote = _urllib.quote
|
||||
quote_plus = _urllib.quote_plus
|
||||
unquote = _urllib.unquote
|
||||
unquote_plus = _urllib.unquote_plus
|
||||
urlencode = _urllib.urlencode
|
||||
splitquery = _urllib.splitquery
|
||||
splittag = _urllib.splittag
|
||||
splituser = _urllib.splituser
|
||||
uses_fragment = _urlparse.uses_fragment
|
||||
uses_netloc = _urlparse.uses_netloc
|
||||
uses_params = _urlparse.uses_params
|
||||
uses_query = _urlparse.uses_query
|
||||
uses_relative = _urlparse.uses_relative
|
||||
|
||||
class UrllibError(object):
|
||||
import urllib2 as _urllib2
|
||||
import urllib as _urllib
|
||||
URLError = _urllib2.URLError
|
||||
HTTPError = _urllib2.HTTPError
|
||||
ContentTooShortError = _urllib.ContentTooShortError
|
||||
|
||||
class DummyModule(object):
|
||||
pass
|
||||
|
||||
class UrllibRequest(object):
|
||||
import urlparse as _urlparse
|
||||
import urllib2 as _urllib2
|
||||
import urllib as _urllib
|
||||
urlopen = _urllib2.urlopen
|
||||
install_opener = _urllib2.install_opener
|
||||
build_opener = _urllib2.build_opener
|
||||
pathname2url = _urllib.pathname2url
|
||||
url2pathname = _urllib.url2pathname
|
||||
getproxies = _urllib.getproxies
|
||||
Request = _urllib2.Request
|
||||
OpenerDirector = _urllib2.OpenerDirector
|
||||
HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler
|
||||
HTTPRedirectHandler = _urllib2.HTTPRedirectHandler
|
||||
HTTPCookieProcessor = _urllib2.HTTPCookieProcessor
|
||||
ProxyHandler = _urllib2.ProxyHandler
|
||||
BaseHandler = _urllib2.BaseHandler
|
||||
HTTPPasswordMgr = _urllib2.HTTPPasswordMgr
|
||||
HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm
|
||||
AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler
|
||||
HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler
|
||||
ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler
|
||||
AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler
|
||||
HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler
|
||||
ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler
|
||||
HTTPHandler = _urllib2.HTTPHandler
|
||||
HTTPSHandler = _urllib2.HTTPSHandler
|
||||
FileHandler = _urllib2.FileHandler
|
||||
FTPHandler = _urllib2.FTPHandler
|
||||
CacheFTPHandler = _urllib2.CacheFTPHandler
|
||||
UnknownHandler = _urllib2.UnknownHandler
|
||||
HTTPErrorProcessor = _urllib2.HTTPErrorProcessor
|
||||
urlretrieve = _urllib.urlretrieve
|
||||
urlcleanup = _urllib.urlcleanup
|
||||
proxy_bypass = _urllib.proxy_bypass
|
||||
|
||||
urllib_parse = UrllibParse()
|
||||
urllib_error = UrllibError()
|
||||
urllib = DummyModule()
|
||||
urllib.request = UrllibRequest()
|
||||
urllib.parse = UrllibParse()
|
||||
urllib.error = UrllibError()
|
||||
"""
|
||||
else:
|
||||
_IMPORTS_3 = """
|
||||
import _io
|
||||
cStringIO = _io.StringIO
|
||||
filter = filter
|
||||
from itertools import filterfalse
|
||||
input = input
|
||||
from sys import intern
|
||||
map = map
|
||||
range = range
|
||||
from imp import reload as reload_module
|
||||
from functools import reduce
|
||||
from shlex import quote as shlex_quote
|
||||
from io import StringIO
|
||||
from collections import UserDict, UserList, UserString
|
||||
xrange = range
|
||||
zip = zip
|
||||
from itertools import zip_longest
|
||||
import builtins
|
||||
import configparser
|
||||
import copyreg
|
||||
import _dummy_thread
|
||||
import http.cookiejar as http_cookiejar
|
||||
import http.cookies as http_cookies
|
||||
import html.entities as html_entities
|
||||
import html.parser as html_parser
|
||||
import http.client as http_client
|
||||
import http.server
|
||||
BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
|
||||
import pickle as cPickle
|
||||
import queue
|
||||
import reprlib
|
||||
import socketserver
|
||||
import _thread
|
||||
import winreg
|
||||
import xmlrpc.server as xmlrpc_server
|
||||
import xmlrpc.client as xmlrpc_client
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import email.mime.multipart as email_mime_multipart
|
||||
import email.mime.nonmultipart as email_mime_nonmultipart
|
||||
import email.mime.text as email_mime_text
|
||||
import email.mime.base as email_mime_base
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
import tkinter
|
||||
import tkinter.dialog as tkinter_dialog
|
||||
import tkinter.filedialog as tkinter_filedialog
|
||||
import tkinter.scrolledtext as tkinter_scrolledtext
|
||||
import tkinter.simpledialog as tkinder_simpledialog
|
||||
import tkinter.tix as tkinter_tix
|
||||
import tkinter.ttk as tkinter_ttk
|
||||
import tkinter.constants as tkinter_constants
|
||||
import tkinter.dnd as tkinter_dnd
|
||||
import tkinter.colorchooser as tkinter_colorchooser
|
||||
import tkinter.commondialog as tkinter_commondialog
|
||||
import tkinter.filedialog as tkinter_tkfiledialog
|
||||
import tkinter.font as tkinter_font
|
||||
import tkinter.messagebox as tkinter_messagebox
|
||||
import urllib.request
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
"""
|
||||
if sys.version_info[0] == 2:
|
||||
_IMPORTS = dedent(_IMPORTS_2)
|
||||
else:
|
||||
_IMPORTS = dedent(_IMPORTS_3)
|
||||
|
||||
|
||||
def six_moves_transform():
|
||||
code = dedent('''
|
||||
class Moves(object):
|
||||
{}
|
||||
moves = Moves()
|
||||
''').format(_indent(_IMPORTS, " "))
|
||||
module = AstroidBuilder(MANAGER).string_build(code)
|
||||
module.name = 'six.moves'
|
||||
return module
|
||||
|
||||
|
||||
def _six_fail_hook(modname):
|
||||
if modname != 'six.moves':
|
||||
raise AstroidBuildingException
|
||||
module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
|
||||
module.name = 'six.moves'
|
||||
return module
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'six', six_moves_transform)
|
||||
register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six',
|
||||
six_moves_transform)
|
||||
MANAGER.register_failed_import_hook(_six_fail_hook)
|
||||
|
|
@ -1,240 +0,0 @@
|
|||
# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""The AstroidBuilder makes astroid from living object and / or from _ast
|
||||
|
||||
The builder is not thread safe and can't be used to parse different sources
|
||||
at the same time.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
from os.path import splitext, basename, exists, abspath
|
||||
|
||||
from astroid.exceptions import AstroidBuildingException, InferenceError
|
||||
from astroid.raw_building import InspectBuilder
|
||||
from astroid.rebuilder import TreeRebuilder
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.bases import YES, Instance
|
||||
from astroid.modutils import modpath_from_file
|
||||
|
||||
from _ast import PyCF_ONLY_AST
|
||||
def parse(string):
|
||||
return compile(string, "<string>", 'exec', PyCF_ONLY_AST)
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
from tokenize import detect_encoding
|
||||
|
||||
def open_source_file(filename):
|
||||
with open(filename, 'rb') as byte_stream:
|
||||
encoding = detect_encoding(byte_stream.readline)[0]
|
||||
stream = open(filename, 'r', newline=None, encoding=encoding)
|
||||
try:
|
||||
data = stream.read()
|
||||
except UnicodeError: # wrong encodingg
|
||||
# detect_encoding returns utf-8 if no encoding specified
|
||||
msg = 'Wrong (%s) or no encoding specified' % encoding
|
||||
raise AstroidBuildingException(msg)
|
||||
return stream, encoding, data
|
||||
|
||||
else:
|
||||
import re
|
||||
|
||||
_ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)")
|
||||
|
||||
def _guess_encoding(string):
|
||||
"""get encoding from a python file as string or return None if not found
|
||||
"""
|
||||
# check for UTF-8 byte-order mark
|
||||
if string.startswith('\xef\xbb\xbf'):
|
||||
return 'UTF-8'
|
||||
for line in string.split('\n', 2)[:2]:
|
||||
# check for encoding declaration
|
||||
match = _ENCODING_RGX.match(line)
|
||||
if match is not None:
|
||||
return match.group(1)
|
||||
|
||||
def open_source_file(filename):
|
||||
"""get data for parsing a file"""
|
||||
stream = open(filename, 'U')
|
||||
data = stream.read()
|
||||
encoding = _guess_encoding(data)
|
||||
return stream, encoding, data
|
||||
|
||||
# ast NG builder ##############################################################
|
||||
|
||||
MANAGER = AstroidManager()
|
||||
|
||||
class AstroidBuilder(InspectBuilder):
|
||||
"""provide astroid building methods"""
|
||||
|
||||
def __init__(self, manager=None):
|
||||
InspectBuilder.__init__(self)
|
||||
self._manager = manager or MANAGER
|
||||
|
||||
def module_build(self, module, modname=None):
|
||||
"""build an astroid from a living module instance
|
||||
"""
|
||||
node = None
|
||||
path = getattr(module, '__file__', None)
|
||||
if path is not None:
|
||||
path_, ext = splitext(module.__file__)
|
||||
if ext in ('.py', '.pyc', '.pyo') and exists(path_ + '.py'):
|
||||
node = self.file_build(path_ + '.py', modname)
|
||||
if node is None:
|
||||
# this is a built-in module
|
||||
# get a partial representation by introspection
|
||||
node = self.inspect_build(module, modname=modname, path=path)
|
||||
# we have to handle transformation by ourselves since the rebuilder
|
||||
# isn't called for builtin nodes
|
||||
#
|
||||
# XXX it's then only called for Module nodes, not for underlying
|
||||
# nodes
|
||||
node = self._manager.transform(node)
|
||||
return node
|
||||
|
||||
def file_build(self, path, modname=None):
|
||||
"""build astroid from a source code file (i.e. from an ast)
|
||||
|
||||
path is expected to be a python source file
|
||||
"""
|
||||
try:
|
||||
stream, encoding, data = open_source_file(path)
|
||||
except IOError as exc:
|
||||
msg = 'Unable to load file %r (%s)' % (path, exc)
|
||||
raise AstroidBuildingException(msg)
|
||||
except SyntaxError as exc: # py3k encoding specification error
|
||||
raise AstroidBuildingException(exc)
|
||||
except LookupError as exc: # unknown encoding
|
||||
raise AstroidBuildingException(exc)
|
||||
with stream:
|
||||
# get module name if necessary
|
||||
if modname is None:
|
||||
try:
|
||||
modname = '.'.join(modpath_from_file(path))
|
||||
except ImportError:
|
||||
modname = splitext(basename(path))[0]
|
||||
# build astroid representation
|
||||
module = self._data_build(data, modname, path)
|
||||
return self._post_build(module, encoding)
|
||||
|
||||
def string_build(self, data, modname='', path=None):
|
||||
"""build astroid from source code string and return rebuilded astroid"""
|
||||
module = self._data_build(data, modname, path)
|
||||
module.file_bytes = data.encode('utf-8')
|
||||
return self._post_build(module, 'utf-8')
|
||||
|
||||
def _post_build(self, module, encoding):
|
||||
"""handles encoding and delayed nodes
|
||||
after a module has been built
|
||||
"""
|
||||
module.file_encoding = encoding
|
||||
self._manager.cache_module(module)
|
||||
# post tree building steps after we stored the module in the cache:
|
||||
for from_node in module._from_nodes:
|
||||
if from_node.modname == '__future__':
|
||||
for symbol, _ in from_node.names:
|
||||
module.future_imports.add(symbol)
|
||||
self.add_from_names_to_locals(from_node)
|
||||
# handle delayed assattr nodes
|
||||
for delayed in module._delayed_assattr:
|
||||
self.delayed_assattr(delayed)
|
||||
return module
|
||||
|
||||
def _data_build(self, data, modname, path):
|
||||
"""build tree node from data and add some informations"""
|
||||
# this method could be wrapped with a pickle/cache function
|
||||
try:
|
||||
node = parse(data + '\n')
|
||||
except TypeError as exc:
|
||||
raise AstroidBuildingException(exc)
|
||||
if path is not None:
|
||||
node_file = abspath(path)
|
||||
else:
|
||||
node_file = '<?>'
|
||||
if modname.endswith('.__init__'):
|
||||
modname = modname[:-9]
|
||||
package = True
|
||||
else:
|
||||
package = path and path.find('__init__.py') > -1 or False
|
||||
rebuilder = TreeRebuilder(self._manager)
|
||||
module = rebuilder.visit_module(node, modname, node_file, package)
|
||||
module._from_nodes = rebuilder._from_nodes
|
||||
module._delayed_assattr = rebuilder._delayed_assattr
|
||||
return module
|
||||
|
||||
def add_from_names_to_locals(self, node):
|
||||
"""store imported names to the locals;
|
||||
resort the locals if coming from a delayed node
|
||||
"""
|
||||
|
||||
_key_func = lambda node: node.fromlineno
|
||||
def sort_locals(my_list):
|
||||
my_list.sort(key=_key_func)
|
||||
for (name, asname) in node.names:
|
||||
if name == '*':
|
||||
try:
|
||||
imported = node.do_import_module()
|
||||
except InferenceError:
|
||||
continue
|
||||
for name in imported.wildcard_import_names():
|
||||
node.parent.set_local(name, node)
|
||||
sort_locals(node.parent.scope().locals[name])
|
||||
else:
|
||||
node.parent.set_local(asname or name, node)
|
||||
sort_locals(node.parent.scope().locals[asname or name])
|
||||
|
||||
def delayed_assattr(self, node):
|
||||
"""visit a AssAttr node -> add name to locals, handle members
|
||||
definition
|
||||
"""
|
||||
try:
|
||||
frame = node.frame()
|
||||
for infered in node.expr.infer():
|
||||
if infered is YES:
|
||||
continue
|
||||
try:
|
||||
if infered.__class__ is Instance:
|
||||
infered = infered._proxied
|
||||
iattrs = infered.instance_attrs
|
||||
elif isinstance(infered, Instance):
|
||||
# Const, Tuple, ... we may be wrong, may be not, but
|
||||
# anyway we don't want to pollute builtin's namespace
|
||||
continue
|
||||
elif infered.is_function:
|
||||
iattrs = infered.instance_attrs
|
||||
else:
|
||||
iattrs = infered.locals
|
||||
except AttributeError:
|
||||
# XXX log error
|
||||
#import traceback
|
||||
#traceback.print_exc()
|
||||
continue
|
||||
values = iattrs.setdefault(node.attrname, [])
|
||||
if node in values:
|
||||
continue
|
||||
# get assign in __init__ first XXX useful ?
|
||||
if frame.name == '__init__' and values and not \
|
||||
values[0].frame().name == '__init__':
|
||||
values.insert(0, node)
|
||||
else:
|
||||
values.append(node)
|
||||
except InferenceError:
|
||||
pass
|
||||
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains exceptions used in the astroid library
|
||||
|
||||
"""
|
||||
|
||||
__doctype__ = "restructuredtext en"
|
||||
|
||||
class AstroidError(Exception):
|
||||
"""base exception class for all astroid related exceptions"""
|
||||
|
||||
class AstroidBuildingException(AstroidError):
|
||||
"""exception class when we are unable to build an astroid representation"""
|
||||
|
||||
class ResolveError(AstroidError):
|
||||
"""base class of astroid resolution/inference error"""
|
||||
|
||||
class NotFoundError(ResolveError):
|
||||
"""raised when we are unable to resolve a name"""
|
||||
|
||||
class InferenceError(ResolveError):
|
||||
"""raised when we are unable to infer a node"""
|
||||
|
||||
class UseInferenceDefault(Exception):
|
||||
"""exception to be raised in custom inference function to indicate that it
|
||||
should go back to the default behaviour
|
||||
"""
|
||||
|
||||
class UnresolvableName(InferenceError):
|
||||
"""raised when we are unable to resolve a name"""
|
||||
|
||||
class NoDefault(AstroidError):
|
||||
"""raised by function's `default_value` method when an argument has
|
||||
no default value
|
||||
"""
|
||||
|
||||
|
|
@ -1,405 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains a set of functions to handle inference on astroid trees
|
||||
"""
|
||||
|
||||
__doctype__ = "restructuredtext en"
|
||||
|
||||
from itertools import chain
|
||||
|
||||
from astroid import nodes
|
||||
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.exceptions import (AstroidError, InferenceError, NoDefault,
|
||||
NotFoundError, UnresolvableName)
|
||||
from astroid.bases import (YES, Instance, InferenceContext,
|
||||
_infer_stmts, copy_context, path_wrapper,
|
||||
raise_if_nothing_infered)
|
||||
from astroid.protocols import (
|
||||
_arguments_infer_argname,
|
||||
BIN_OP_METHOD, UNARY_OP_METHOD)
|
||||
|
||||
MANAGER = AstroidManager()
|
||||
|
||||
|
||||
class CallContext(object):
|
||||
"""when inferring a function call, this class is used to remember values
|
||||
given as argument
|
||||
"""
|
||||
def __init__(self, args, starargs, dstarargs):
|
||||
self.args = []
|
||||
self.nargs = {}
|
||||
for arg in args:
|
||||
if isinstance(arg, nodes.Keyword):
|
||||
self.nargs[arg.arg] = arg.value
|
||||
else:
|
||||
self.args.append(arg)
|
||||
self.starargs = starargs
|
||||
self.dstarargs = dstarargs
|
||||
|
||||
def infer_argument(self, funcnode, name, context):
|
||||
"""infer a function argument value according to the call context"""
|
||||
# 1. search in named keywords
|
||||
try:
|
||||
return self.nargs[name].infer(context)
|
||||
except KeyError:
|
||||
# Function.args.args can be None in astroid (means that we don't have
|
||||
# information on argnames)
|
||||
argindex = funcnode.args.find_argname(name)[0]
|
||||
if argindex is not None:
|
||||
# 2. first argument of instance/class method
|
||||
if argindex == 0 and funcnode.type in ('method', 'classmethod'):
|
||||
if context.boundnode is not None:
|
||||
boundnode = context.boundnode
|
||||
else:
|
||||
# XXX can do better ?
|
||||
boundnode = funcnode.parent.frame()
|
||||
if funcnode.type == 'method':
|
||||
if not isinstance(boundnode, Instance):
|
||||
boundnode = Instance(boundnode)
|
||||
return iter((boundnode,))
|
||||
if funcnode.type == 'classmethod':
|
||||
return iter((boundnode,))
|
||||
# if we have a method, extract one position
|
||||
# from the index, so we'll take in account
|
||||
# the extra parameter represented by `self` or `cls`
|
||||
if funcnode.type in ('method', 'classmethod'):
|
||||
argindex -= 1
|
||||
# 2. search arg index
|
||||
try:
|
||||
return self.args[argindex].infer(context)
|
||||
except IndexError:
|
||||
pass
|
||||
# 3. search in *args (.starargs)
|
||||
if self.starargs is not None:
|
||||
its = []
|
||||
for infered in self.starargs.infer(context):
|
||||
if infered is YES:
|
||||
its.append((YES,))
|
||||
continue
|
||||
try:
|
||||
its.append(infered.getitem(argindex, context).infer(context))
|
||||
except (InferenceError, AttributeError):
|
||||
its.append((YES,))
|
||||
except (IndexError, TypeError):
|
||||
continue
|
||||
if its:
|
||||
return chain(*its)
|
||||
# 4. XXX search in **kwargs (.dstarargs)
|
||||
if self.dstarargs is not None:
|
||||
its = []
|
||||
for infered in self.dstarargs.infer(context):
|
||||
if infered is YES:
|
||||
its.append((YES,))
|
||||
continue
|
||||
try:
|
||||
its.append(infered.getitem(name, context).infer(context))
|
||||
except (InferenceError, AttributeError):
|
||||
its.append((YES,))
|
||||
except (IndexError, TypeError):
|
||||
continue
|
||||
if its:
|
||||
return chain(*its)
|
||||
# 5. */** argument, (Tuple or Dict)
|
||||
if name == funcnode.args.vararg:
|
||||
return iter((nodes.const_factory(())))
|
||||
if name == funcnode.args.kwarg:
|
||||
return iter((nodes.const_factory({})))
|
||||
# 6. return default value if any
|
||||
try:
|
||||
return funcnode.args.default_value(name).infer(context)
|
||||
except NoDefault:
|
||||
raise InferenceError(name)
|
||||
|
||||
|
||||
# .infer method ###############################################################
|
||||
|
||||
|
||||
def infer_end(self, context=None):
|
||||
"""inference's end for node such as Module, Class, Function, Const...
|
||||
"""
|
||||
yield self
|
||||
nodes.Module._infer = infer_end
|
||||
nodes.Class._infer = infer_end
|
||||
nodes.Function._infer = infer_end
|
||||
nodes.Lambda._infer = infer_end
|
||||
nodes.Const._infer = infer_end
|
||||
nodes.List._infer = infer_end
|
||||
nodes.Tuple._infer = infer_end
|
||||
nodes.Dict._infer = infer_end
|
||||
nodes.Set._infer = infer_end
|
||||
|
||||
def _higher_function_scope(node):
|
||||
""" Search for the first function which encloses the given
|
||||
scope. This can be used for looking up in that function's
|
||||
scope, in case looking up in a lower scope for a particular
|
||||
name fails.
|
||||
|
||||
:param node: A scope node.
|
||||
:returns:
|
||||
``None``, if no parent function scope was found,
|
||||
otherwise an instance of :class:`astroid.scoped_nodes.Function`,
|
||||
which encloses the given node.
|
||||
"""
|
||||
current = node
|
||||
while current.parent and not isinstance(current.parent, nodes.Function):
|
||||
current = current.parent
|
||||
if current and current.parent:
|
||||
return current.parent
|
||||
|
||||
def infer_name(self, context=None):
|
||||
"""infer a Name: use name lookup rules"""
|
||||
frame, stmts = self.lookup(self.name)
|
||||
if not stmts:
|
||||
# Try to see if the name is enclosed in a nested function
|
||||
# and use the higher (first function) scope for searching.
|
||||
# TODO: should this be promoted to other nodes as well?
|
||||
parent_function = _higher_function_scope(self.scope())
|
||||
if parent_function:
|
||||
_, stmts = parent_function.lookup(self.name)
|
||||
|
||||
if not stmts:
|
||||
raise UnresolvableName(self.name)
|
||||
context = context.clone()
|
||||
context.lookupname = self.name
|
||||
return _infer_stmts(stmts, context, frame)
|
||||
nodes.Name._infer = path_wrapper(infer_name)
|
||||
nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper
|
||||
|
||||
|
||||
def infer_callfunc(self, context=None):
|
||||
"""infer a CallFunc node by trying to guess what the function returns"""
|
||||
callcontext = context.clone()
|
||||
callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs)
|
||||
callcontext.boundnode = None
|
||||
for callee in self.func.infer(context):
|
||||
if callee is YES:
|
||||
yield callee
|
||||
continue
|
||||
try:
|
||||
if hasattr(callee, 'infer_call_result'):
|
||||
for infered in callee.infer_call_result(self, callcontext):
|
||||
yield infered
|
||||
except InferenceError:
|
||||
## XXX log error ?
|
||||
continue
|
||||
nodes.CallFunc._infer = path_wrapper(raise_if_nothing_infered(infer_callfunc))
|
||||
|
||||
|
||||
def infer_import(self, context=None, asname=True):
|
||||
"""infer an Import node: return the imported module/object"""
|
||||
name = context.lookupname
|
||||
if name is None:
|
||||
raise InferenceError()
|
||||
if asname:
|
||||
yield self.do_import_module(self.real_name(name))
|
||||
else:
|
||||
yield self.do_import_module(name)
|
||||
nodes.Import._infer = path_wrapper(infer_import)
|
||||
|
||||
def infer_name_module(self, name):
|
||||
context = InferenceContext()
|
||||
context.lookupname = name
|
||||
return self.infer(context, asname=False)
|
||||
nodes.Import.infer_name_module = infer_name_module
|
||||
|
||||
|
||||
def infer_from(self, context=None, asname=True):
|
||||
"""infer a From nodes: return the imported module/object"""
|
||||
name = context.lookupname
|
||||
if name is None:
|
||||
raise InferenceError()
|
||||
if asname:
|
||||
name = self.real_name(name)
|
||||
module = self.do_import_module()
|
||||
try:
|
||||
context = copy_context(context)
|
||||
context.lookupname = name
|
||||
return _infer_stmts(module.getattr(name, ignore_locals=module is self.root()), context)
|
||||
except NotFoundError:
|
||||
raise InferenceError(name)
|
||||
nodes.From._infer = path_wrapper(infer_from)
|
||||
|
||||
|
||||
def infer_getattr(self, context=None):
|
||||
"""infer a Getattr node by using getattr on the associated object"""
|
||||
for owner in self.expr.infer(context):
|
||||
if owner is YES:
|
||||
yield owner
|
||||
continue
|
||||
try:
|
||||
context.boundnode = owner
|
||||
for obj in owner.igetattr(self.attrname, context):
|
||||
yield obj
|
||||
context.boundnode = None
|
||||
except (NotFoundError, InferenceError):
|
||||
context.boundnode = None
|
||||
except AttributeError:
|
||||
# XXX method / function
|
||||
context.boundnode = None
|
||||
nodes.Getattr._infer = path_wrapper(raise_if_nothing_infered(infer_getattr))
|
||||
nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # # won't work with a path wrapper
|
||||
|
||||
|
||||
def infer_global(self, context=None):
    """Infer a Global node by resolving the name at module (root) level."""
    target = context.lookupname
    if target is None:
        raise InferenceError()
    try:
        # a global name lives in the module's own namespace
        return _infer_stmts(self.root().getattr(target), context)
    except NotFoundError:
        raise InferenceError()
nodes.Global._infer = path_wrapper(infer_global)
|
||||
|
||||
|
||||
def infer_subscript(self, context=None):
    """infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]"""
    # only the first inferred value/index is considered (next(...))
    value = next(self.value.infer(context))
    if value is YES:
        yield YES
        return

    index = next(self.slice.infer(context))
    if index is YES:
        yield YES
        return

    # only constant indices are handled; anything else is uninferable
    if isinstance(index, nodes.Const):
        try:
            assigned = value.getitem(index.value, context)
        except AttributeError:
            # the inferred value has no getitem support at all
            raise InferenceError()
        except (IndexError, TypeError):
            # index out of range / wrong index type: unknown, not an error
            yield YES
            return

        # Prevent inferring if the infered subscript
        # is the same as the original subscripted object.
        if self is assigned:
            yield YES
            return
        for infered in assigned.infer(context):
            yield infered
    else:
        raise InferenceError()
nodes.Subscript._infer = path_wrapper(infer_subscript)
nodes.Subscript.infer_lhs = raise_if_nothing_infered(infer_subscript)
|
||||
|
||||
def infer_unaryop(self, context=None):
    """infer a UnaryOp node: apply the operator to each inferred operand"""
    for operand in self.operand.infer(context):
        try:
            yield operand.infer_unary_op(self.op)
        except TypeError:
            # operand does not support the operator: skip it
            continue
        except AttributeError:
            # no infer_unary_op on the operand; fall back to checking for
            # the corresponding dunder method (e.g. __neg__)
            meth = UNARY_OP_METHOD[self.op]
            if meth is None:
                yield YES
            else:
                try:
                    # XXX just suppose if the type implement meth, returned type
                    # will be the same
                    operand.getattr(meth)
                    yield operand
                except GeneratorExit:
                    # NOTE(review): GeneratorExit must escape so the generator
                    # can close; the bare except below would otherwise eat it
                    raise
                except:
                    yield YES
nodes.UnaryOp._infer = path_wrapper(infer_unaryop)
|
||||
|
||||
def _infer_binop(operator, operand1, operand2, context, failures=None):
    """infer one (lhs, rhs) pairing of a binary operation.

    When *failures* is a list, operands that cannot handle the operator
    are appended to it instead of yielding YES, so the caller can retry
    them with the operands swapped (reflected operation).
    """
    if operand1 is YES:
        yield operand1
        return
    try:
        for valnode in operand1.infer_binary_op(operator, operand2, context):
            yield valnode
    except AttributeError:
        try:
            # XXX just suppose if the type implement meth, returned type
            # will be the same
            operand1.getattr(BIN_OP_METHOD[operator])
            yield operand1
        except:
            if failures is None:
                yield YES
            else:
                failures.append(operand1)
|
||||
|
||||
def infer_binop(self, context=None):
    """infer a BinOp node: try lhs-first, then retry failed lhs values
    against the rhs (reflected operation)."""
    failures = []
    for lhs in self.left.infer(context):
        for val in _infer_binop(self.op, lhs, self.right, context, failures):
            yield val
    # second pass: operands swapped for lhs values that had no support
    for lhs in failures:
        for rhs in self.right.infer(context):
            for val in _infer_binop(self.op, rhs, lhs, context):
                yield val
nodes.BinOp._infer = path_wrapper(infer_binop)
|
||||
|
||||
|
||||
def infer_arguments(self, context=None):
    """Infer an Arguments node for the argument named in the context."""
    argname = context.lookupname
    if argname is None:
        raise InferenceError()
    return _arguments_infer_argname(self, argname, context)
nodes.Arguments._infer = infer_arguments
|
||||
|
||||
|
||||
def infer_ass(self, context=None):
    """infer a AssName/AssAttr: need to inspect the RHS part of the
    assign node
    """
    stmt = self.statement()
    # augmented assignment (x += y) is inferred as the whole statement
    if isinstance(stmt, nodes.AugAssign):
        return stmt.infer(context)
    stmts = list(self.assigned_stmts(context=context))
    return _infer_stmts(stmts, context)
nodes.AssName._infer = path_wrapper(infer_ass)
nodes.AssAttr._infer = path_wrapper(infer_ass)
|
||||
|
||||
def infer_augassign(self, context=None):
    """infer an AugAssign node (x op= y), mirroring infer_binop:
    target-first, then retry failures with the operands swapped."""
    failures = []
    for lhs in self.target.infer_lhs(context):
        for val in _infer_binop(self.op, lhs, self.value, context, failures):
            yield val
    for lhs in failures:
        for rhs in self.value.infer(context):
            for val in _infer_binop(self.op, rhs, lhs, context):
                yield val
nodes.AugAssign._infer = path_wrapper(infer_augassign)
|
||||
|
||||
|
||||
# no infer method on DelName and DelAttr (expected InferenceError)
|
||||
|
||||
|
||||
def infer_empty_node(self, context=None):
    """infer an EmptyNode: delegate to the manager when the node wraps a
    live Python object, otherwise it is uninferable."""
    if not self.has_underlying_object():
        yield YES
    else:
        try:
            for infered in MANAGER.infer_ast_from_something(self.object,
                                                            context=context):
                yield infered
        except AstroidError:
            yield YES
nodes.EmptyNode._infer = path_wrapper(infer_empty_node)
|
||||
|
||||
|
||||
def infer_index(self, context=None):
    """infer an Index node: the index is whatever its value infers to"""
    return self.value.infer(context)
nodes.Index._infer = infer_index
|
||||
|
|
@ -1,273 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""visitor doing some postprocessing on the astroid tree.
|
||||
Try to resolve definitions (namespace) dictionary, relationship...
|
||||
|
||||
This module has been imported from pyreverse
|
||||
"""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
from os.path import dirname
|
||||
|
||||
import astroid
|
||||
from astroid.exceptions import InferenceError
|
||||
from astroid.utils import LocalsVisitor
|
||||
from astroid.modutils import get_module_part, is_relative, is_standard_module
|
||||
|
||||
class IdGeneratorMixIn(object):
    """Mixin adding the ability to generate integer uid."""

    def __init__(self, start_value=0):
        # counter of the last id handed out; first generated id is
        # start_value + 1
        self.id_count = start_value

    def init_counter(self, start_value=0):
        """Reset the id counter to *start_value*."""
        self.id_count = start_value

    def generate_id(self):
        """Return a new, strictly increasing integer identifier."""
        self.id_count = self.id_count + 1
        return self.id_count
|
||||
|
||||
|
||||
class Linker(IdGeneratorMixIn, LocalsVisitor):
    """
    walk on the project tree and resolve relationships.

    According to options the following attributes may be added to visited nodes:

    * uid,
      a unique identifier for the node (on astroid.Project, astroid.Module,
      astroid.Class and astroid.locals_type). Only if the linker has been instantiated
      with tag=True parameter (False by default).

    * Function
      a mapping from locals names to their bounded value, which may be a
      constant like a string or an integer, or an astroid node (on astroid.Module,
      astroid.Class and astroid.Function).

    * instance_attrs_type
      as locals_type but for klass member attributes (only on astroid.Class)

    * implements,
      list of implemented interface _objects_ (only on astroid.Class nodes)
    """

    def __init__(self, project, inherited_interfaces=0, tag=False):
        IdGeneratorMixIn.__init__(self)
        LocalsVisitor.__init__(self)
        # take inherited interface in consideration or not
        self.inherited_interfaces = inherited_interfaces
        # tag nodes or not
        self.tag = tag
        # visited project
        self.project = project

    def visit_project(self, node):
        """visit an astroid.Project node

        * optionally tag the node with a unique id
        """
        if self.tag:
            node.uid = self.generate_id()
        for module in node.modules:
            self.visit(module)

    def visit_package(self, node):
        """visit an astroid.Package node

        * optionally tag the node with a unique id
        """
        if self.tag:
            node.uid = self.generate_id()
        for subelmt in node.values():
            self.visit(subelmt)

    def visit_module(self, node):
        """visit an astroid.Module node

        * set the locals_type mapping
        * set the depends mapping
        * optionally tag the node with a unique id
        """
        # locals_type already present => node was visited by a previous run
        if hasattr(node, 'locals_type'):
            return
        node.locals_type = {}
        node.depends = []
        if self.tag:
            node.uid = self.generate_id()

    def visit_class(self, node):
        """visit an astroid.Class node

        * set the locals_type and instance_attrs_type mappings
        * set the implements list and build it
        * optionally tag the node with a unique id
        """
        if hasattr(node, 'locals_type'):
            return
        node.locals_type = {}
        if self.tag:
            node.uid = self.generate_id()
        # resolve ancestors: record this class as a specialization of each
        # direct base
        for baseobj in node.ancestors(recurs=False):
            specializations = getattr(baseobj, 'specializations', [])
            specializations.append(node)
            baseobj.specializations = specializations
        # resolve instance attributes
        node.instance_attrs_type = {}
        for assattrs in node.instance_attrs.values():
            for assattr in assattrs:
                self.handle_assattr_type(assattr, node)
        # resolve implemented interface
        try:
            node.implements = list(node.interfaces(self.inherited_interfaces))
        except InferenceError:
            node.implements = ()

    def visit_function(self, node):
        """visit an astroid.Function node

        * set the locals_type mapping
        * optionally tag the node with a unique id
        """
        if hasattr(node, 'locals_type'):
            return
        node.locals_type = {}
        if self.tag:
            node.uid = self.generate_id()

    # aliases used by the LocalsVisitor dispatch machinery
    link_project = visit_project
    link_module = visit_module
    link_class = visit_class
    link_function = visit_function

    def visit_assname(self, node):
        """visit an astroid.AssName node

        handle locals_type
        """
        # avoid double parsing done by different Linkers.visit
        # running over the same project:
        if hasattr(node, '_handled'):
            return
        node._handled = True
        if node.name in node.frame():
            frame = node.frame()
        else:
            # the name has been defined as 'global' in the frame and belongs
            # there. Btw the frame is not yet visited as the name is in the
            # root locals; the frame hence has no locals_type attribute
            frame = node.root()
        try:
            values = node.infered()
            try:
                # merge newly inferred values into the existing list
                already_infered = frame.locals_type[node.name]
                for valnode in values:
                    if not valnode in already_infered:
                        already_infered.append(valnode)
            except KeyError:
                frame.locals_type[node.name] = values
        except astroid.InferenceError:
            pass

    def handle_assattr_type(self, node, parent):
        """handle an astroid.AssAttr node

        handle instance_attrs_type
        """
        try:
            values = list(node.infer())
            try:
                already_infered = parent.instance_attrs_type[node.attrname]
                for valnode in values:
                    if not valnode in already_infered:
                        already_infered.append(valnode)
            except KeyError:
                parent.instance_attrs_type[node.attrname] = values
        except astroid.InferenceError:
            pass

    def visit_import(self, node):
        """visit an astroid.Import node

        resolve module dependencies
        """
        context_file = node.root().file
        for name in node.names:
            relative = is_relative(name[0], context_file)
            self._imported_module(node, name[0], relative)

    def visit_from(self, node):
        """visit an astroid.From node

        resolve module dependencies
        """
        basename = node.modname
        context_file = node.root().file
        if context_file is not None:
            relative = is_relative(basename, context_file)
        else:
            relative = False
        for name in node.names:
            if name[0] == '*':
                continue
            # analyze dependencies
            fullname = '%s.%s' % (basename, name[0])
            if fullname.find('.') > -1:
                try:
                    # XXX: don't use get_module_part, missing package precedence
                    fullname = get_module_part(fullname, context_file)
                except ImportError:
                    continue
            if fullname != basename:
                self._imported_module(node, fullname, relative)

    def compute_module(self, context_name, mod_path):
        """return true if the module should be added to dependencies"""
        package_dir = dirname(self.project.path)
        if context_name == mod_path:
            # a module does not depend on itself
            return 0
        elif is_standard_module(mod_path, (package_dir,)):
            return 1
        return 0

    # protected methods ########################################################

    def _imported_module(self, node, mod_path, relative):
        """notify an imported module, used to analyze dependencies
        """
        module = node.root()
        context_name = module.name
        if relative:
            # resolve a relative import against the importing package
            mod_path = '%s.%s' % ('.'.join(context_name.split('.')[:-1]),
                                  mod_path)
        if self.compute_module(context_name, mod_path):
            # handle dependencies
            if not hasattr(module, 'depends'):
                module.depends = []
            mod_paths = module.depends
            if not mod_path in mod_paths:
                mod_paths.append(mod_path)
|
||||
|
|
@ -1,391 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""astroid manager: avoid multiple astroid build of a same module when
|
||||
possible by providing a class responsible to get astroid representation
|
||||
from various source and using a cache of built modules)
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import collections
|
||||
import imp
|
||||
import os
|
||||
from os.path import dirname, join, isdir, exists
|
||||
from warnings import warn
|
||||
import zipimport
|
||||
|
||||
from logilab.common.configuration import OptionsProviderMixIn
|
||||
|
||||
from astroid.exceptions import AstroidBuildingException
|
||||
from astroid import modutils
|
||||
|
||||
|
||||
def astroid_wrapper(func, modname):
    """wrapper to give to AstroidManager.project_from_files"""
    # announce progress before building the module's AST
    print('parsing %s...' % modname)
    try:
        return func(modname)
    except AstroidBuildingException as exc:
        # expected build failure: report it, fall through to return None
        print(exc)
    except Exception as exc:
        # unexpected failure: dump the traceback but keep going
        import traceback
        traceback.print_exc()
|
||||
|
||||
def _silent_no_wrap(func, modname):
|
||||
"""silent wrapper that doesn't do anything; can be used for tests"""
|
||||
return func(modname)
|
||||
|
||||
def safe_repr(obj):
    """Return ``repr(obj)``, or ``'???'`` if repr itself raises.

    Used when formatting error messages about arbitrary live objects,
    whose ``__repr__`` may be broken.
    """
    try:
        return repr(obj)
    except Exception:
        # was a bare ``except:``, which would also swallow
        # KeyboardInterrupt/SystemExit; only repr() failures should be masked
        return '???'
|
||||
|
||||
|
||||
|
||||
class AstroidManager(OptionsProviderMixIn):
    """the astroid manager, responsible to build astroid from files
    or modules.

    Use the Borg pattern.
    """

    name = 'astroid loader'
    options = (("ignore",
                {'type' : "csv", 'metavar' : "<file>",
                 'dest' : "black_list", "default" : ('CVS',),
                 'help' : "add <file> (may be a directory) to the black list\
. It should be a base name, not a path. You may set this option multiple times\
."}),
               ("project",
                {'default': "No Name", 'type' : 'string', 'short': 'p',
                 'metavar' : '<project name>',
                 'help' : 'set the project name.'}),
              )
    # Borg pattern: every instance shares this dict as its __dict__
    brain = {}
    def __init__(self):
        self.__dict__ = AstroidManager.brain
        if not self.__dict__:
            # first instantiation only: initialize the shared state
            OptionsProviderMixIn.__init__(self)
            self.load_defaults()
            # NOTE: cache entries are added by the [re]builder
            self.astroid_cache = {}
            self._mod_file_cache = {}
            self.transforms = collections.defaultdict(list)
            self._failed_import_hooks = []
            self.always_load_extensions = False
            self.optimize_ast = False
            self.extension_package_whitelist = set()

    def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
        """given a module name, return the astroid object"""
        try:
            # prefer the .py source over a compiled/extension file
            filepath = modutils.get_source_file(filepath, include_no_ext=True)
            source = True
        except modutils.NoSourceFile:
            pass
        if modname is None:
            try:
                modname = '.'.join(modutils.modpath_from_file(filepath))
            except ImportError:
                modname = filepath
        if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath:
            return self.astroid_cache[modname]
        if source:
            from astroid.builder import AstroidBuilder
            return AstroidBuilder(self).file_build(filepath, modname)
        elif fallback and modname:
            return self.ast_from_module_name(modname)
        raise AstroidBuildingException('unable to get astroid for file %s' %
                                       filepath)

    def _build_stub_module(self, modname):
        # empty module AST used as a placeholder
        from astroid.builder import AstroidBuilder
        return AstroidBuilder(self).string_build('', modname)

    def _can_load_extension(self, modname):
        # a C extension (or any of its parent packages) must be
        # whitelisted before we actually import it
        if self.always_load_extensions:
            return True
        if modutils.is_standard_module(modname):
            return True
        parts = modname.split('.')
        return any(
            '.'.join(parts[:x]) in self.extension_package_whitelist
            for x in range(1, len(parts) + 1))

    def ast_from_module_name(self, modname, context_file=None):
        """given a module name, return the astroid object"""
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        if modname == '__main__':
            return self._build_stub_module(modname)
        # chdir next to the importing file so relative lookups work;
        # restored in the finally clause
        old_cwd = os.getcwd()
        if context_file:
            os.chdir(dirname(context_file))
        try:
            filepath, mp_type = self.file_from_module_name(modname, context_file)
            if mp_type == modutils.PY_ZIPMODULE:
                module = self.zip_import_data(filepath)
                if module is not None:
                    return module
            elif mp_type in (imp.C_BUILTIN, imp.C_EXTENSION):
                if mp_type == imp.C_EXTENSION and not self._can_load_extension(modname):
                    return self._build_stub_module(modname)
                try:
                    module = modutils.load_module_from_name(modname)
                except Exception as ex:
                    msg = 'Unable to load module %s (%s)' % (modname, ex)
                    raise AstroidBuildingException(msg)
                return self.ast_from_module(module, modname)
            elif mp_type == imp.PY_COMPILED:
                raise AstroidBuildingException("Unable to load compiled module %s" % (modname,))
            if filepath is None:
                raise AstroidBuildingException("Unable to load module %s" % (modname,))
            return self.ast_from_file(filepath, modname, fallback=False)
        except AstroidBuildingException as e:
            # give registered fallback hooks a chance before giving up
            for hook in self._failed_import_hooks:
                try:
                    return hook(modname)
                except AstroidBuildingException:
                    pass
            raise e
        finally:
            os.chdir(old_cwd)

    def zip_import_data(self, filepath):
        """build the AST for a module living inside a .zip/.egg archive,
        or return None if *filepath* is not such a module"""
        if zipimport is None:
            return None
        from astroid.builder import AstroidBuilder
        builder = AstroidBuilder(self)
        for ext in ('.zip', '.egg'):
            try:
                eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
            except ValueError:
                continue
            try:
                importer = zipimport.zipimporter(eggpath + ext)
                zmodname = resource.replace(os.path.sep, '.')
                if importer.is_package(resource):
                    zmodname = zmodname + '.__init__'
                module = builder.string_build(importer.get_source(resource),
                                              zmodname, filepath)
                return module
            except:
                # NOTE(review): deliberately best-effort — any failure on one
                # extension falls through to the next candidate
                continue
        return None

    def file_from_module_name(self, modname, contextfile):
        """return the (filepath, type) pair for *modname*, memoized;
        failures are cached as exception instances and re-raised"""
        try:
            value = self._mod_file_cache[(modname, contextfile)]
        except KeyError:
            try:
                value = modutils.file_info_from_modpath(
                    modname.split('.'), context_file=contextfile)
            except ImportError as ex:
                msg = 'Unable to load module %s (%s)' % (modname, ex)
                value = AstroidBuildingException(msg)
            self._mod_file_cache[(modname, contextfile)] = value
        if isinstance(value, AstroidBuildingException):
            raise value
        return value

    def ast_from_module(self, module, modname=None):
        """given an imported module, return the astroid object"""
        modname = modname or module.__name__
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        try:
            # some builtin modules don't have __file__ attribute
            filepath = module.__file__
            if modutils.is_python_source(filepath):
                return self.ast_from_file(filepath, modname)
        except AttributeError:
            pass
        from astroid.builder import AstroidBuilder
        return AstroidBuilder(self).module_build(module, modname)

    def ast_from_class(self, klass, modname=None):
        """get astroid for the given class"""
        if modname is None:
            try:
                modname = klass.__module__
            except AttributeError:
                raise AstroidBuildingException(
                    'Unable to get module for class %s' % safe_repr(klass))
        modastroid = self.ast_from_module_name(modname)
        return modastroid.getattr(klass.__name__)[0] # XXX

    def infer_ast_from_something(self, obj, context=None):
        """infer astroid for the given class"""
        # instances are inferred through their class
        if hasattr(obj, '__class__') and not isinstance(obj, type):
            klass = obj.__class__
        else:
            klass = obj
        try:
            modname = klass.__module__
        except AttributeError:
            raise AstroidBuildingException(
                'Unable to get module for %s' % safe_repr(klass))
        except Exception as ex:
            raise AstroidBuildingException(
                'Unexpected error while retrieving module for %s: %s'
                % (safe_repr(klass), ex))
        try:
            name = klass.__name__
        except AttributeError:
            raise AstroidBuildingException(
                'Unable to get name for %s' % safe_repr(klass))
        except Exception as ex:
            raise AstroidBuildingException(
                'Unexpected error while retrieving name for %s: %s'
                % (safe_repr(klass), ex))
        # take care, on living object __module__ is regularly wrong :(
        modastroid = self.ast_from_module_name(modname)
        if klass is obj:
            for infered in modastroid.igetattr(name, context):
                yield infered
        else:
            # obj was an instance: yield instantiated class nodes
            for infered in modastroid.igetattr(name, context):
                yield infered.instanciate_class()

    def project_from_files(self, files, func_wrapper=astroid_wrapper,
                           project_name=None, black_list=None):
        """return a Project from a list of files or modules"""
        # build the project representation
        project_name = project_name or self.config.project
        black_list = black_list or self.config.black_list
        project = Project(project_name)
        for something in files:
            if not exists(something):
                # not a path: treat as a dotted module name
                fpath = modutils.file_from_modpath(something.split('.'))
            elif isdir(something):
                fpath = join(something, '__init__.py')
            else:
                fpath = something
            astroid = func_wrapper(self.ast_from_file, fpath)
            if astroid is None:
                continue
            # XXX why is first file defining the project.path ?
            project.path = project.path or astroid.file
            project.add_module(astroid)
            base_name = astroid.name
            # recurse in package except if __init__ was explicitly given
            if astroid.package and something.find('__init__') == -1:
                # recurse on others packages / modules if this is a package
                for fpath in modutils.get_module_files(dirname(astroid.file),
                                                       black_list):
                    astroid = func_wrapper(self.ast_from_file, fpath)
                    if astroid is None or astroid.name == base_name:
                        continue
                    project.add_module(astroid)
        return project

    def register_transform(self, node_class, transform, predicate=None):
        """Register `transform(node)` function to be applied on the given
        Astroid's `node_class` if `predicate` is None or returns true
        when called with the node as argument.

        The transform function may return a value which is then used to
        substitute the original node in the tree.
        """
        self.transforms[node_class].append((transform, predicate))

    def unregister_transform(self, node_class, transform, predicate=None):
        """Unregister the given transform."""
        self.transforms[node_class].remove((transform, predicate))

    def register_failed_import_hook(self, hook):
        """Registers a hook to resolve imports that cannot be found otherwise.

        `hook` must be a function that accepts a single argument `modname` which
        contains the name of the module or package that could not be imported.
        If `hook` can resolve the import, must return a node of type `astroid.Module`,
        otherwise, it must raise `AstroidBuildingException`.
        """
        self._failed_import_hooks.append(hook)

    def transform(self, node):
        """Call matching transforms for the given node if any and return the
        transformed node.
        """
        cls = node.__class__
        if cls not in self.transforms:
            # no transform registered for this class of node
            return node

        transforms = self.transforms[cls]
        orig_node = node # copy the reference
        for transform_func, predicate in transforms:
            if predicate is None or predicate(node):
                ret = transform_func(node)
                # if the transformation function returns something, it's
                # expected to be a replacement for the node
                if ret is not None:
                    if node is not orig_node:
                        # node has already be modified by some previous
                        # transformation, warn about it
                        warn('node %s substituted multiple times' % node)
                    node = ret
        return node

    def cache_module(self, module):
        """Cache a module if no module with the same name is known yet."""
        self.astroid_cache.setdefault(module.name, module)

    def clear_cache(self, astroid_builtin=None):
        # XXX clear transforms
        self.astroid_cache.clear()
        # force bootstrap again, else we may ends up with cache inconsistency
        # between the manager and CONST_PROXY, making
        # unittest_lookup.LookupTC.test_builtin_lookup fail depending on the
        # test order
        import astroid.raw_building
        astroid.raw_building._astroid_bootstrapping(
            astroid_builtin=astroid_builtin)
|
||||
|
||||
|
||||
class Project(object):
    """a project handle a set of modules / packages"""

    def __init__(self, name=''):
        self.name = name
        self.path = None
        self.modules = []
        # mapping of module name -> module node; expose a dict-like facade
        # by aliasing the mapping's methods on the instance
        self.locals = {}
        mapping = self.locals
        self.__getitem__ = mapping.__getitem__
        self.__iter__ = mapping.__iter__
        self.values = mapping.values
        self.keys = mapping.keys
        self.items = mapping.items

    def add_module(self, node):
        """register *node* under its name, preserving insertion order"""
        self.locals[node.name] = node
        self.modules.append(node)

    def get_module(self, name):
        """return the module node registered under *name*"""
        return self.locals[name]

    def get_children(self):
        """return the registered module nodes"""
        return self.modules

    def __repr__(self):
        return '<Project %r at %s (%s modules)>' % (self.name, id(self),
                                                    len(self.modules))
|
||||
|
||||
|
||||
|
|
@ -1,124 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""This module contains some mixins for the different nodes.
|
||||
"""
|
||||
|
||||
from logilab.common.decorators import cachedproperty
|
||||
|
||||
from astroid.exceptions import (AstroidBuildingException, InferenceError,
|
||||
NotFoundError)
|
||||
|
||||
|
||||
class BlockRangeMixIn(object):
    """override block range """

    @cachedproperty
    def blockstart_tolineno(self):
        # by default the block header spans a single line
        return self.lineno

    def _elsed_block_range(self, lineno, orelse, last=None):
        """handle block line numbers range for try/finally, for, if and while
        statements
        """
        if lineno == self.fromlineno:
            # asking about the header line itself
            return lineno, lineno
        if orelse:
            if lineno >= orelse[0].fromlineno:
                # inside the else clause
                return lineno, orelse[-1].tolineno
            # inside the main body: stop right before the else clause
            return lineno, orelse[0].fromlineno - 1
        return lineno, last or self.tolineno
|
||||
|
||||
|
||||
class FilterStmtsMixin(object):
    """Mixin for statement filtering and assignment type"""

    def _get_filtered_stmts(self, _, node, _stmts, mystmt):
        """Return (statements, break_flag) for _filter_stmts.

        When this node's enclosing statement is *mystmt* (the assignment
        itself, e.g. inside a gen exp / list comp), keep only *node* and
        stop filtering; otherwise keep the accumulated statements.
        """
        if self.statement() is not mystmt:
            return _stmts, False
        return [node], True

    def ass_type(self):
        """this node is its own assignment type"""
        return self
|
||||
|
||||
|
||||
class AssignTypeMixin(object):
    """Mixin providing assignment-type resolution and statement filtering."""

    def ass_type(self):
        """this node is its own assignment type"""
        return self

    def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt):
        """method used in filter_stmts"""
        # the node itself is the statement being filtered: keep what we
        # have and stop
        if self is mystmt:
            return _stmts, True
        # original node's statement is the assignment, only keep
        # current node (gen exp, list comp)
        if self.statement() is mystmt:
            return [node], True
        return _stmts, False
|
||||
|
||||
|
||||
class ParentAssignTypeMixin(AssignTypeMixin):
    """AssignTypeMixin variant delegating the assignment type to the parent."""

    def ass_type(self):
        # e.g. an element inside a tuple assignment target: the assignment
        # type is that of the enclosing node
        return self.parent.ass_type()
|
||||
|
||||
|
||||
class FromImportMixIn(FilterStmtsMixin):
|
||||
"""MixIn for From and Import Nodes"""
|
||||
|
||||
def _infer_name(self, frame, name):
|
||||
return name
|
||||
|
||||
def do_import_module(self, modname=None):
|
||||
"""return the ast for a module whose name is <modname> imported by <self>
|
||||
"""
|
||||
# handle special case where we are on a package node importing a module
|
||||
# using the same name as the package, which may end in an infinite loop
|
||||
# on relative imports
|
||||
# XXX: no more needed ?
|
||||
mymodule = self.root()
|
||||
level = getattr(self, 'level', None) # Import as no level
|
||||
if modname is None:
|
||||
modname = self.modname
|
||||
# XXX we should investigate deeper if we really want to check
|
||||
# importing itself: modname and mymodule.name be relative or absolute
|
||||
if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
|
||||
# FIXME: we used to raise InferenceError here, but why ?
|
||||
return mymodule
|
||||
try:
|
||||
return mymodule.import_module(modname, level=level)
|
||||
except AstroidBuildingException:
|
||||
raise InferenceError(modname)
|
||||
except SyntaxError as ex:
|
||||
raise InferenceError(str(ex))
|
||||
|
||||
def real_name(self, asname):
|
||||
"""get name from 'as' name"""
|
||||
for name, _asname in self.names:
|
||||
if name == '*':
|
||||
return asname
|
||||
if not _asname:
|
||||
name = name.split('.', 1)[0]
|
||||
_asname = name
|
||||
if asname == _asname:
|
||||
return name
|
||||
raise NotFoundError(asname)
|
||||
|
||||
|
|
@ -1,670 +0,0 @@
|
|||
# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Python modules manipulation utility functions.
|
||||
|
||||
:type PY_SOURCE_EXTS: tuple(str)
|
||||
:var PY_SOURCE_EXTS: list of possible python source file extension
|
||||
|
||||
:type STD_LIB_DIRS: set of str
|
||||
:var STD_LIB_DIRS: directories where standard modules are located
|
||||
|
||||
:type BUILTIN_MODULES: dict
|
||||
:var BUILTIN_MODULES: dictionary with builtin module names has key
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import imp
|
||||
import os
|
||||
import sys
|
||||
from distutils.sysconfig import get_python_lib
|
||||
from distutils.errors import DistutilsPlatformError
|
||||
import zipimport
|
||||
|
||||
try:
|
||||
import pkg_resources
|
||||
except ImportError:
|
||||
pkg_resources = None
|
||||
|
||||
from logilab.common import _handle_blacklist
|
||||
|
||||
PY_ZIPMODULE = object()
|
||||
|
||||
if sys.platform.startswith('win'):
|
||||
PY_SOURCE_EXTS = ('py', 'pyw')
|
||||
PY_COMPILED_EXTS = ('dll', 'pyd')
|
||||
else:
|
||||
PY_SOURCE_EXTS = ('py',)
|
||||
PY_COMPILED_EXTS = ('so',)
|
||||
|
||||
# Notes about STD_LIB_DIRS
|
||||
# Consider arch-specific installation for STD_LIB_DIRS definition
|
||||
# :mod:`distutils.sysconfig` contains to much hardcoded values to rely on
|
||||
#
|
||||
# :see: `Problems with /usr/lib64 builds <http://bugs.python.org/issue1294959>`_
|
||||
# :see: `FHS <http://www.pathname.com/fhs/pub/fhs-2.3.html#LIBLTQUALGTALTERNATEFORMATESSENTIAL>`_
|
||||
try:
|
||||
# The explicit sys.prefix is to work around a patch in virtualenv that
|
||||
# replaces the 'real' sys.prefix (i.e. the location of the binary)
|
||||
# with the prefix from which the virtualenv was created. This throws
|
||||
# off the detection logic for standard library modules, thus the
|
||||
# workaround.
|
||||
STD_LIB_DIRS = set([
|
||||
get_python_lib(standard_lib=True, prefix=sys.prefix),
|
||||
# Take care of installations where exec_prefix != prefix.
|
||||
get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
|
||||
get_python_lib(standard_lib=True)])
|
||||
if os.name == 'nt':
|
||||
STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls'))
|
||||
try:
|
||||
# real_prefix is defined when running inside virtualenv.
|
||||
STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls'))
|
||||
except AttributeError:
|
||||
pass
|
||||
# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
|
||||
# non-valid path, see https://bugs.pypy.org/issue1164
|
||||
except DistutilsPlatformError:
|
||||
STD_LIB_DIRS = set()
|
||||
|
||||
EXT_LIB_DIR = get_python_lib()
|
||||
|
||||
BUILTIN_MODULES = dict(zip(sys.builtin_module_names,
|
||||
[1]*len(sys.builtin_module_names)))
|
||||
|
||||
|
||||
class NoSourceFile(Exception):
|
||||
"""exception raised when we are not able to get a python
|
||||
source file for a precompiled file
|
||||
"""
|
||||
|
||||
def _normalize_path(path):
|
||||
return os.path.normcase(os.path.abspath(path))
|
||||
|
||||
|
||||
_NORM_PATH_CACHE = {}
|
||||
|
||||
def _cache_normalize_path(path):
|
||||
"""abspath with caching"""
|
||||
# _module_file calls abspath on every path in sys.path every time it's
|
||||
# called; on a larger codebase this easily adds up to half a second just
|
||||
# assembling path components. This cache alleviates that.
|
||||
try:
|
||||
return _NORM_PATH_CACHE[path]
|
||||
except KeyError:
|
||||
if not path: # don't cache result for ''
|
||||
return _normalize_path(path)
|
||||
result = _NORM_PATH_CACHE[path] = _normalize_path(path)
|
||||
return result
|
||||
|
||||
def load_module_from_name(dotted_name, path=None, use_sys=1):
|
||||
"""Load a Python module from its name.
|
||||
|
||||
:type dotted_name: str
|
||||
:param dotted_name: python name of a module or package
|
||||
|
||||
:type path: list or None
|
||||
:param path:
|
||||
optional list of path where the module or package should be
|
||||
searched (use sys.path if nothing or None is given)
|
||||
|
||||
:type use_sys: bool
|
||||
:param use_sys:
|
||||
boolean indicating whether the sys.modules dictionary should be
|
||||
used or not
|
||||
|
||||
|
||||
:raise ImportError: if the module or package is not found
|
||||
|
||||
:rtype: module
|
||||
:return: the loaded module
|
||||
"""
|
||||
return load_module_from_modpath(dotted_name.split('.'), path, use_sys)
|
||||
|
||||
|
||||
def load_module_from_modpath(parts, path=None, use_sys=1):
|
||||
"""Load a python module from its splitted name.
|
||||
|
||||
:type parts: list(str) or tuple(str)
|
||||
:param parts:
|
||||
python name of a module or package splitted on '.'
|
||||
|
||||
:type path: list or None
|
||||
:param path:
|
||||
optional list of path where the module or package should be
|
||||
searched (use sys.path if nothing or None is given)
|
||||
|
||||
:type use_sys: bool
|
||||
:param use_sys:
|
||||
boolean indicating whether the sys.modules dictionary should be used or not
|
||||
|
||||
:raise ImportError: if the module or package is not found
|
||||
|
||||
:rtype: module
|
||||
:return: the loaded module
|
||||
"""
|
||||
if use_sys:
|
||||
try:
|
||||
return sys.modules['.'.join(parts)]
|
||||
except KeyError:
|
||||
pass
|
||||
modpath = []
|
||||
prevmodule = None
|
||||
for part in parts:
|
||||
modpath.append(part)
|
||||
curname = '.'.join(modpath)
|
||||
module = None
|
||||
if len(modpath) != len(parts):
|
||||
# even with use_sys=False, should try to get outer packages from sys.modules
|
||||
module = sys.modules.get(curname)
|
||||
elif use_sys:
|
||||
# because it may have been indirectly loaded through a parent
|
||||
module = sys.modules.get(curname)
|
||||
if module is None:
|
||||
mp_file, mp_filename, mp_desc = imp.find_module(part, path)
|
||||
module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
|
||||
# mp_file still needs to be closed.
|
||||
if mp_file:
|
||||
mp_file.close()
|
||||
if prevmodule:
|
||||
setattr(prevmodule, part, module)
|
||||
_file = getattr(module, '__file__', '')
|
||||
if not _file and len(modpath) != len(parts):
|
||||
raise ImportError('no module in %s' % '.'.join(parts[len(modpath):]))
|
||||
path = [os.path.dirname(_file)]
|
||||
prevmodule = module
|
||||
return module
|
||||
|
||||
|
||||
def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None):
|
||||
"""Load a Python module from it's path.
|
||||
|
||||
:type filepath: str
|
||||
:param filepath: path to the python module or package
|
||||
|
||||
:type path: list or None
|
||||
:param path:
|
||||
optional list of path where the module or package should be
|
||||
searched (use sys.path if nothing or None is given)
|
||||
|
||||
:type use_sys: bool
|
||||
:param use_sys:
|
||||
boolean indicating whether the sys.modules dictionary should be
|
||||
used or not
|
||||
|
||||
|
||||
:raise ImportError: if the module or package is not found
|
||||
|
||||
:rtype: module
|
||||
:return: the loaded module
|
||||
"""
|
||||
modpath = modpath_from_file(filepath, extrapath)
|
||||
return load_module_from_modpath(modpath, path, use_sys)
|
||||
|
||||
|
||||
def _check_init(path, mod_path):
|
||||
"""check there are some __init__.py all along the way"""
|
||||
for part in mod_path:
|
||||
path = os.path.join(path, part)
|
||||
if not _has_init(path):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def modpath_from_file(filename, extrapath=None):
|
||||
"""given a file path return the corresponding splitted module's name
|
||||
(i.e name of a module or package splitted on '.')
|
||||
|
||||
:type filename: str
|
||||
:param filename: file's path for which we want the module's name
|
||||
|
||||
:type extrapath: dict
|
||||
:param extrapath:
|
||||
optional extra search path, with path as key and package name for the path
|
||||
as value. This is usually useful to handle package splitted in multiple
|
||||
directories using __path__ trick.
|
||||
|
||||
|
||||
:raise ImportError:
|
||||
if the corresponding module's name has not been found
|
||||
|
||||
:rtype: list(str)
|
||||
:return: the corresponding splitted module's name
|
||||
"""
|
||||
base = os.path.splitext(os.path.abspath(filename))[0]
|
||||
if extrapath is not None:
|
||||
for path_ in extrapath:
|
||||
path = os.path.abspath(path_)
|
||||
if path and os.path.normcase(base[:len(path)]) == os.path.normcase(path):
|
||||
submodpath = [pkg for pkg in base[len(path):].split(os.sep)
|
||||
if pkg]
|
||||
if _check_init(path, submodpath[:-1]):
|
||||
return extrapath[path_].split('.') + submodpath
|
||||
for path in sys.path:
|
||||
path = _cache_normalize_path(path)
|
||||
if path and os.path.normcase(base).startswith(path):
|
||||
modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg]
|
||||
if _check_init(path, modpath[:-1]):
|
||||
return modpath
|
||||
raise ImportError('Unable to find module for %s in %s' % (
|
||||
filename, ', \n'.join(sys.path)))
|
||||
|
||||
|
||||
def file_from_modpath(modpath, path=None, context_file=None):
|
||||
return file_info_from_modpath(modpath, path, context_file)[0]
|
||||
|
||||
def file_info_from_modpath(modpath, path=None, context_file=None):
|
||||
"""given a mod path (i.e. splitted module / package name), return the
|
||||
corresponding file, giving priority to source file over precompiled
|
||||
file if it exists
|
||||
|
||||
:type modpath: list or tuple
|
||||
:param modpath:
|
||||
splitted module's name (i.e name of a module or package splitted
|
||||
on '.')
|
||||
(this means explicit relative imports that start with dots have
|
||||
empty strings in this list!)
|
||||
|
||||
:type path: list or None
|
||||
:param path:
|
||||
optional list of path where the module or package should be
|
||||
searched (use sys.path if nothing or None is given)
|
||||
|
||||
:type context_file: str or None
|
||||
:param context_file:
|
||||
context file to consider, necessary if the identifier has been
|
||||
introduced using a relative import unresolvable in the actual
|
||||
context (i.e. modutils)
|
||||
|
||||
:raise ImportError: if there is no such module in the directory
|
||||
|
||||
:rtype: (str or None, import type)
|
||||
:return:
|
||||
the path to the module's file or None if it's an integrated
|
||||
builtin module such as 'sys'
|
||||
"""
|
||||
if context_file is not None:
|
||||
context = os.path.dirname(context_file)
|
||||
else:
|
||||
context = context_file
|
||||
if modpath[0] == 'xml':
|
||||
# handle _xmlplus
|
||||
try:
|
||||
return _file_from_modpath(['_xmlplus'] + modpath[1:], path, context)
|
||||
except ImportError:
|
||||
return _file_from_modpath(modpath, path, context)
|
||||
elif modpath == ['os', 'path']:
|
||||
# FIXME: currently ignoring search_path...
|
||||
return os.path.__file__, imp.PY_SOURCE
|
||||
return _file_from_modpath(modpath, path, context)
|
||||
|
||||
|
||||
def get_module_part(dotted_name, context_file=None):
|
||||
"""given a dotted name return the module part of the name :
|
||||
|
||||
>>> get_module_part('logilab.common.modutils.get_module_part')
|
||||
'logilab.common.modutils'
|
||||
|
||||
:type dotted_name: str
|
||||
:param dotted_name: full name of the identifier we are interested in
|
||||
|
||||
:type context_file: str or None
|
||||
:param context_file:
|
||||
context file to consider, necessary if the identifier has been
|
||||
introduced using a relative import unresolvable in the actual
|
||||
context (i.e. modutils)
|
||||
|
||||
|
||||
:raise ImportError: if there is no such module in the directory
|
||||
|
||||
:rtype: str or None
|
||||
:return:
|
||||
the module part of the name or None if we have not been able at
|
||||
all to import the given name
|
||||
|
||||
XXX: deprecated, since it doesn't handle package precedence over module
|
||||
(see #10066)
|
||||
"""
|
||||
# os.path trick
|
||||
if dotted_name.startswith('os.path'):
|
||||
return 'os.path'
|
||||
parts = dotted_name.split('.')
|
||||
if context_file is not None:
|
||||
# first check for builtin module which won't be considered latter
|
||||
# in that case (path != None)
|
||||
if parts[0] in BUILTIN_MODULES:
|
||||
if len(parts) > 2:
|
||||
raise ImportError(dotted_name)
|
||||
return parts[0]
|
||||
# don't use += or insert, we want a new list to be created !
|
||||
path = None
|
||||
starti = 0
|
||||
if parts[0] == '':
|
||||
assert context_file is not None, \
|
||||
'explicit relative import, but no context_file?'
|
||||
path = [] # prevent resolving the import non-relatively
|
||||
starti = 1
|
||||
while parts[starti] == '': # for all further dots: change context
|
||||
starti += 1
|
||||
context_file = os.path.dirname(context_file)
|
||||
for i in range(starti, len(parts)):
|
||||
try:
|
||||
file_from_modpath(parts[starti:i+1], path=path,
|
||||
context_file=context_file)
|
||||
except ImportError:
|
||||
if not i >= max(1, len(parts) - 2):
|
||||
raise
|
||||
return '.'.join(parts[:i])
|
||||
return dotted_name
|
||||
|
||||
|
||||
def get_module_files(src_directory, blacklist):
|
||||
"""given a package directory return a list of all available python
|
||||
module's files in the package and its subpackages
|
||||
|
||||
:type src_directory: str
|
||||
:param src_directory:
|
||||
path of the directory corresponding to the package
|
||||
|
||||
:type blacklist: list or tuple
|
||||
:param blacklist:
|
||||
optional list of files or directory to ignore, default to the value of
|
||||
`logilab.common.STD_BLACKLIST`
|
||||
|
||||
:rtype: list
|
||||
:return:
|
||||
the list of all available python module's files in the package and
|
||||
its subpackages
|
||||
"""
|
||||
files = []
|
||||
for directory, dirnames, filenames in os.walk(src_directory):
|
||||
_handle_blacklist(blacklist, dirnames, filenames)
|
||||
# check for __init__.py
|
||||
if not '__init__.py' in filenames:
|
||||
dirnames[:] = ()
|
||||
continue
|
||||
for filename in filenames:
|
||||
if _is_python_file(filename):
|
||||
src = os.path.join(directory, filename)
|
||||
files.append(src)
|
||||
return files
|
||||
|
||||
|
||||
def get_source_file(filename, include_no_ext=False):
|
||||
"""given a python module's file name return the matching source file
|
||||
name (the filename will be returned identically if it's a already an
|
||||
absolute path to a python source file...)
|
||||
|
||||
:type filename: str
|
||||
:param filename: python module's file name
|
||||
|
||||
|
||||
:raise NoSourceFile: if no source file exists on the file system
|
||||
|
||||
:rtype: str
|
||||
:return: the absolute path of the source file if it exists
|
||||
"""
|
||||
base, orig_ext = os.path.splitext(os.path.abspath(filename))
|
||||
for ext in PY_SOURCE_EXTS:
|
||||
source_path = '%s.%s' % (base, ext)
|
||||
if os.path.exists(source_path):
|
||||
return source_path
|
||||
if include_no_ext and not orig_ext and os.path.exists(base):
|
||||
return base
|
||||
raise NoSourceFile(filename)
|
||||
|
||||
|
||||
def is_python_source(filename):
|
||||
"""
|
||||
rtype: bool
|
||||
return: True if the filename is a python source file
|
||||
"""
|
||||
return os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS
|
||||
|
||||
|
||||
def is_standard_module(modname, std_path=None):
|
||||
"""try to guess if a module is a standard python module (by default,
|
||||
see `std_path` parameter's description)
|
||||
|
||||
:type modname: str
|
||||
:param modname: name of the module we are interested in
|
||||
|
||||
:type std_path: list(str) or tuple(str)
|
||||
:param std_path: list of path considered has standard
|
||||
|
||||
|
||||
:rtype: bool
|
||||
:return:
|
||||
true if the module:
|
||||
- is located on the path listed in one of the directory in `std_path`
|
||||
- is a built-in module
|
||||
"""
|
||||
modname = modname.split('.')[0]
|
||||
try:
|
||||
filename = file_from_modpath([modname])
|
||||
except ImportError:
|
||||
# import failed, i'm probably not so wrong by supposing it's
|
||||
# not standard...
|
||||
return False
|
||||
# modules which are not living in a file are considered standard
|
||||
# (sys and __builtin__ for instance)
|
||||
if filename is None:
|
||||
return True
|
||||
filename = _normalize_path(filename)
|
||||
if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)):
|
||||
return False
|
||||
if std_path is None:
|
||||
std_path = STD_LIB_DIRS
|
||||
for path in std_path:
|
||||
if filename.startswith(_cache_normalize_path(path)):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
|
||||
def is_relative(modname, from_file):
|
||||
"""return true if the given module name is relative to the given
|
||||
file name
|
||||
|
||||
:type modname: str
|
||||
:param modname: name of the module we are interested in
|
||||
|
||||
:type from_file: str
|
||||
:param from_file:
|
||||
path of the module from which modname has been imported
|
||||
|
||||
:rtype: bool
|
||||
:return:
|
||||
true if the module has been imported relatively to `from_file`
|
||||
"""
|
||||
if not os.path.isdir(from_file):
|
||||
from_file = os.path.dirname(from_file)
|
||||
if from_file in sys.path:
|
||||
return False
|
||||
try:
|
||||
stream, _, _ = imp.find_module(modname.split('.')[0], [from_file])
|
||||
|
||||
# Close the stream to avoid ResourceWarnings.
|
||||
if stream:
|
||||
stream.close()
|
||||
return True
|
||||
except ImportError:
|
||||
return False
|
||||
|
||||
|
||||
# internal only functions #####################################################
|
||||
|
||||
def _file_from_modpath(modpath, path=None, context=None):
|
||||
"""given a mod path (i.e. splitted module / package name), return the
|
||||
corresponding file
|
||||
|
||||
this function is used internally, see `file_from_modpath`'s
|
||||
documentation for more information
|
||||
"""
|
||||
assert len(modpath) > 0
|
||||
if context is not None:
|
||||
try:
|
||||
mtype, mp_filename = _module_file(modpath, [context])
|
||||
except ImportError:
|
||||
mtype, mp_filename = _module_file(modpath, path)
|
||||
else:
|
||||
mtype, mp_filename = _module_file(modpath, path)
|
||||
if mtype == imp.PY_COMPILED:
|
||||
try:
|
||||
return get_source_file(mp_filename), imp.PY_SOURCE
|
||||
except NoSourceFile:
|
||||
return mp_filename, imp.PY_COMPILED
|
||||
elif mtype == imp.C_BUILTIN:
|
||||
# integrated builtin module
|
||||
return None, imp.C_BUILTIN
|
||||
elif mtype == imp.PKG_DIRECTORY:
|
||||
mp_filename = _has_init(mp_filename)
|
||||
mtype = imp.PY_SOURCE
|
||||
return mp_filename, mtype
|
||||
|
||||
def _search_zip(modpath, pic):
|
||||
for filepath, importer in pic.items():
|
||||
if importer is not None:
|
||||
if importer.find_module(modpath[0]):
|
||||
if not importer.find_module(os.path.sep.join(modpath)):
|
||||
raise ImportError('No module named %s in %s/%s' % (
|
||||
'.'.join(modpath[1:]), filepath, modpath))
|
||||
return PY_ZIPMODULE, os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), filepath
|
||||
raise ImportError('No module named %s' % '.'.join(modpath))
|
||||
|
||||
|
||||
def _module_file(modpath, path=None):
|
||||
"""get a module type / file path
|
||||
|
||||
:type modpath: list or tuple
|
||||
:param modpath:
|
||||
splitted module's name (i.e name of a module or package splitted
|
||||
on '.'), with leading empty strings for explicit relative import
|
||||
|
||||
:type path: list or None
|
||||
:param path:
|
||||
optional list of path where the module or package should be
|
||||
searched (use sys.path if nothing or None is given)
|
||||
|
||||
|
||||
:rtype: tuple(int, str)
|
||||
:return: the module type flag and the file path for a module
|
||||
"""
|
||||
# egg support compat
|
||||
try:
|
||||
pic = sys.path_importer_cache
|
||||
_path = (path is None and sys.path or path)
|
||||
for __path in _path:
|
||||
if not __path in pic:
|
||||
try:
|
||||
pic[__path] = zipimport.zipimporter(__path)
|
||||
except zipimport.ZipImportError:
|
||||
pic[__path] = None
|
||||
checkeggs = True
|
||||
except AttributeError:
|
||||
checkeggs = False
|
||||
# pkg_resources support (aka setuptools namespace packages)
|
||||
if (pkg_resources is not None
|
||||
and modpath[0] in pkg_resources._namespace_packages
|
||||
and modpath[0] in sys.modules
|
||||
and len(modpath) > 1):
|
||||
# setuptools has added into sys.modules a module object with proper
|
||||
# __path__, get back information from there
|
||||
module = sys.modules[modpath.pop(0)]
|
||||
path = module.__path__
|
||||
imported = []
|
||||
while modpath:
|
||||
modname = modpath[0]
|
||||
# take care to changes in find_module implementation wrt builtin modules
|
||||
#
|
||||
# Python 2.6.6 (r266:84292, Sep 11 2012, 08:34:23)
|
||||
# >>> imp.find_module('posix')
|
||||
# (None, 'posix', ('', '', 6))
|
||||
#
|
||||
# Python 3.3.1 (default, Apr 26 2013, 12:08:46)
|
||||
# >>> imp.find_module('posix')
|
||||
# (None, None, ('', '', 6))
|
||||
try:
|
||||
stream, mp_filename, mp_desc = imp.find_module(modname, path)
|
||||
except ImportError:
|
||||
if checkeggs:
|
||||
return _search_zip(modpath, pic)[:2]
|
||||
raise
|
||||
else:
|
||||
# Don't forget to close the stream to avoid
|
||||
# spurious ResourceWarnings.
|
||||
if stream:
|
||||
stream.close()
|
||||
|
||||
if checkeggs and mp_filename:
|
||||
fullabspath = [_cache_normalize_path(x) for x in _path]
|
||||
try:
|
||||
pathindex = fullabspath.index(os.path.dirname(_normalize_path(mp_filename)))
|
||||
emtype, emp_filename, zippath = _search_zip(modpath, pic)
|
||||
if pathindex > _path.index(zippath):
|
||||
# an egg takes priority
|
||||
return emtype, emp_filename
|
||||
except ValueError:
|
||||
# XXX not in _path
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
||||
checkeggs = False
|
||||
imported.append(modpath.pop(0))
|
||||
mtype = mp_desc[2]
|
||||
if modpath:
|
||||
if mtype != imp.PKG_DIRECTORY:
|
||||
raise ImportError('No module %s in %s' % ('.'.join(modpath),
|
||||
'.'.join(imported)))
|
||||
# XXX guess if package is using pkgutil.extend_path by looking for
|
||||
# those keywords in the first four Kbytes
|
||||
try:
|
||||
with open(os.path.join(mp_filename, '__init__.py'), 'rb') as stream:
|
||||
data = stream.read(4096)
|
||||
except IOError:
|
||||
path = [mp_filename]
|
||||
else:
|
||||
if b'pkgutil' in data and b'extend_path' in data:
|
||||
# extend_path is called, search sys.path for module/packages
|
||||
# of this name see pkgutil.extend_path documentation
|
||||
path = [os.path.join(p, *imported) for p in sys.path
|
||||
if os.path.isdir(os.path.join(p, *imported))]
|
||||
else:
|
||||
path = [mp_filename]
|
||||
return mtype, mp_filename
|
||||
|
||||
def _is_python_file(filename):
|
||||
"""return true if the given filename should be considered as a python file
|
||||
|
||||
.pyc and .pyo are ignored
|
||||
"""
|
||||
for ext in ('.py', '.so', '.pyd', '.pyw'):
|
||||
if filename.endswith(ext):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _has_init(directory):
|
||||
"""if the given directory has a valid __init__ file, return its path,
|
||||
else return None
|
||||
"""
|
||||
mod_or_pack = os.path.join(directory, '__init__')
|
||||
for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'):
|
||||
if os.path.exists(mod_or_pack + '.' + ext):
|
||||
return mod_or_pack + '.' + ext
|
||||
return None
|
||||
|
|
@ -1,966 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Module for some node classes. More nodes in scoped_nodes.py
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
import six
|
||||
from logilab.common.decorators import cachedproperty
|
||||
|
||||
from astroid.exceptions import NoDefault
|
||||
from astroid.bases import (NodeNG, Statement, Instance, InferenceContext,
|
||||
_infer_stmts, YES, BUILTINS)
|
||||
from astroid.mixins import (BlockRangeMixIn, AssignTypeMixin,
|
||||
ParentAssignTypeMixin, FromImportMixIn)
|
||||
|
||||
PY3K = sys.version_info >= (3, 0)
|
||||
|
||||
|
||||
def unpack_infer(stmt, context=None):
|
||||
"""recursively generate nodes inferred by the given statement.
|
||||
If the inferred value is a list or a tuple, recurse on the elements
|
||||
"""
|
||||
if isinstance(stmt, (List, Tuple)):
|
||||
for elt in stmt.elts:
|
||||
for infered_elt in unpack_infer(elt, context):
|
||||
yield infered_elt
|
||||
return
|
||||
# if infered is a final node, return it and stop
|
||||
infered = next(stmt.infer(context))
|
||||
if infered is stmt:
|
||||
yield infered
|
||||
return
|
||||
# else, infer recursivly, except YES object that should be returned as is
|
||||
for infered in stmt.infer(context):
|
||||
if infered is YES:
|
||||
yield infered
|
||||
else:
|
||||
for inf_inf in unpack_infer(infered, context):
|
||||
yield inf_inf
|
||||
|
||||
|
||||
def are_exclusive(stmt1, stmt2, exceptions=None):
|
||||
"""return true if the two given statements are mutually exclusive
|
||||
|
||||
`exceptions` may be a list of exception names. If specified, discard If
|
||||
branches and check one of the statement is in an exception handler catching
|
||||
one of the given exceptions.
|
||||
|
||||
algorithm :
|
||||
1) index stmt1's parents
|
||||
2) climb among stmt2's parents until we find a common parent
|
||||
3) if the common parent is a If or TryExcept statement, look if nodes are
|
||||
in exclusive branches
|
||||
"""
|
||||
# index stmt1's parents
|
||||
stmt1_parents = {}
|
||||
children = {}
|
||||
node = stmt1.parent
|
||||
previous = stmt1
|
||||
while node:
|
||||
stmt1_parents[node] = 1
|
||||
children[node] = previous
|
||||
previous = node
|
||||
node = node.parent
|
||||
# climb among stmt2's parents until we find a common parent
|
||||
node = stmt2.parent
|
||||
previous = stmt2
|
||||
while node:
|
||||
if node in stmt1_parents:
|
||||
# if the common parent is a If or TryExcept statement, look if
|
||||
# nodes are in exclusive branches
|
||||
if isinstance(node, If) and exceptions is None:
|
||||
if (node.locate_child(previous)[1]
|
||||
is not node.locate_child(children[node])[1]):
|
||||
return True
|
||||
elif isinstance(node, TryExcept):
|
||||
c2attr, c2node = node.locate_child(previous)
|
||||
c1attr, c1node = node.locate_child(children[node])
|
||||
if c1node is not c2node:
|
||||
if ((c2attr == 'body' and c1attr == 'handlers' and children[node].catch(exceptions)) or
|
||||
(c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or
|
||||
(c2attr == 'handlers' and c1attr == 'orelse') or
|
||||
(c2attr == 'orelse' and c1attr == 'handlers')):
|
||||
return True
|
||||
elif c2attr == 'handlers' and c1attr == 'handlers':
|
||||
return previous is not children[node]
|
||||
return False
|
||||
previous = node
|
||||
node = node.parent
|
||||
return False
|
||||
|
||||
|
||||
class LookupMixIn(object):
    """Mixin looking up a name in the right scope
    """

    def lookup(self, name):
        """lookup a variable name

        return the scope node and the list of assignments associated to the
        given name according to the scope where it has been found (locals,
        globals or builtin)

        The lookup is starting from self's scope. If self is not a frame itself
        and the name is found in the inner frame locals, statements will be
        filtered to remove ignorable statements according to self's location
        """
        return self.scope().scope_lookup(self, name)

    def ilookup(self, name):
        """infered lookup

        return an iterator on infered values of the statements returned by
        the lookup method
        """
        frame, stmts = self.lookup(name)
        context = InferenceContext()
        return _infer_stmts(stmts, context, frame)

    def _filter_stmts(self, stmts, frame, offset):
        """filter statements to remove ignorable statements.

        If self is not a frame itself and the name is found in the inner
        frame locals, statements will be filtered to remove ignorable
        statements according to self's location
        """
        # if offset == -1, my actual frame is not the inner frame but its parent
        #
        # class A(B): pass
        #
        # we need this to resolve B correctly
        if offset == -1:
            myframe = self.frame().parent.frame()
        else:
            myframe = self.frame()
            # If the frame of this node is the same as the statement
            # of this node, then the node is part of a class or
            # a function definition and the frame of this node should be the
            # the upper frame, not the frame of the definition.
            # For more information why this is important,
            # see Pylint issue #295.
            # For example, for 'b', the statement is the same
            # as the frame / scope:
            #
            # def test(b=1):
            #     ...

            if self.statement() is myframe and myframe.parent:
                myframe = myframe.parent.frame()
        # no filtering needed when the lookup resolved in a different frame,
        # or when looking up the frame's own name
        if not myframe is frame or self is frame:
            return stmts
        mystmt = self.statement()
        # line filtering if we are in the same frame
        #
        # take care node may be missing lineno information (this is the case for
        # nodes inserted for living objects)
        if myframe is frame and mystmt.fromlineno is not None:
            assert mystmt.fromlineno is not None, mystmt
            mylineno = mystmt.fromlineno + offset
        else:
            # disabling lineno filtering
            mylineno = 0
        # _stmts: surviving assignment nodes; _stmt_parents: the parent of
        # each surviving node's statement, kept in lockstep for block-level
        # comparisons below
        _stmts = []
        _stmt_parents = []
        for node in stmts:
            stmt = node.statement()
            # line filtering is on and we have reached our location, break
            if mylineno > 0 and stmt.fromlineno > mylineno:
                break
            assert hasattr(node, 'ass_type'), (node, node.scope(),
                                               node.scope().locals)
            ass_type = node.ass_type()

            if node.has_base(self):
                break

            _stmts, done = ass_type._get_filtered_stmts(self, node, _stmts, mystmt)
            if done:
                break

            optional_assign = ass_type.optional_assign
            if optional_assign and ass_type.parent_of(self):
                # we are inside a loop, loop var assigment is hidding previous
                # assigment
                _stmts = [node]
                _stmt_parents = [stmt.parent]
                continue

            # XXX comment various branches below!!!
            try:
                pindex = _stmt_parents.index(stmt.parent)
            except ValueError:
                pass
            else:
                # we got a parent index, this means the currently visited node
                # is at the same block level as a previously visited node
                if _stmts[pindex].ass_type().parent_of(ass_type):
                    # both statements are not at the same block level
                    continue
                # if currently visited node is following previously considered
                # assignement and both are not exclusive, we can drop the
                # previous one. For instance in the following code ::
                #
                #   if a:
                #     x = 1
                #   else:
                #     x = 2
                #   print x
                #
                # we can't remove neither x = 1 nor x = 2 when looking for 'x'
                # of 'print x'; while in the following ::
                #
                #   x = 1
                #   x = 2
                #   print x
                #
                # we can remove x = 1 when we see x = 2
                #
                # moreover, on loop assignment types, assignment won't
                # necessarily be done if the loop has no iteration, so we don't
                # want to clear previous assigments if any (hence the test on
                # optional_assign)
                if not (optional_assign or are_exclusive(_stmts[pindex], node)):
                    del _stmt_parents[pindex]
                    del _stmts[pindex]
            if isinstance(node, AssName):
                if not optional_assign and stmt.parent is mystmt.parent:
                    # an unconditional rebinding in the same block shadows
                    # everything collected so far
                    _stmts = []
                    _stmt_parents = []
            elif isinstance(node, DelName):
                # a 'del name' kills all previous assignments and is itself
                # not a usable binding
                _stmts = []
                _stmt_parents = []
                continue
            if not are_exclusive(self, node):
                _stmts.append(node)
                _stmt_parents.append(stmt.parent)
        return _stmts
|
||||
|
||||
# Name classes
|
||||
|
||||
class AssName(LookupMixIn, ParentAssignTypeMixin, NodeNG):
    """class representing an AssName node (a name used as an
    assignment target)"""
|
||||
|
||||
|
||||
class DelName(LookupMixIn, ParentAssignTypeMixin, NodeNG):
    """class representing a DelName node (a name used as the target
    of a ``del`` statement)"""
|
||||
|
||||
|
||||
class Name(LookupMixIn, NodeNG):
    """class representing a Name node (a plain name reference)"""
|
||||
|
||||
|
||||
|
||||
|
||||
##################### node classes ########################################
|
||||
|
||||
class Arguments(NodeNG, AssignTypeMixin):
    """class representing an Arguments node"""
    if PY3K:
        # Python 3.4+ uses a different approach regarding annotations,
        # each argument is a new class, _ast.arg, which exposes an
        # 'annotation' attribute. In astroid though, arguments are exposed
        # as is in the Arguments node and the only way to expose annotations
        # is by using something similar with Python 3.3:
        # - we expose 'varargannotation' and 'kwargannotation' of annotations
        #   of varargs and kwargs.
        # - we expose 'annotation', a list with annotations for
        #   for each normal argument. If an argument doesn't have an
        #   annotation, its value will be None.

        _astroid_fields = ('args', 'defaults', 'kwonlyargs',
                           'kw_defaults', 'annotations',
                           'varargannotation', 'kwargannotation')
        annotations = None
        varargannotation = None
        kwargannotation = None
    else:
        _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults')
    # child-node slots; None until populated
    args = None
    defaults = None
    kwonlyargs = None
    kw_defaults = None

    def __init__(self, vararg=None, kwarg=None):
        # vararg/kwarg are the bare argument names (format_args interpolates
        # them directly into '*%s' / '**%s'), not child nodes
        self.vararg = vararg
        self.kwarg = kwarg

    def _infer_name(self, frame, name):
        # an argument name is only inferable in its own function's frame
        if self.parent is frame:
            return name
        return None

    @cachedproperty
    def fromlineno(self):
        # never report a line before the enclosing definition's line
        lineno = super(Arguments, self).fromlineno
        return max(lineno, self.parent.fromlineno or 0)

    def format_args(self):
        """return arguments formatted as string"""
        result = []
        if self.args:
            result.append(_format_args(self.args, self.defaults))
        if self.vararg:
            result.append('*%s' % self.vararg)
        if self.kwarg:
            result.append('**%s' % self.kwarg)
        if self.kwonlyargs:
            # keyword-only arguments require a bare '*' separator when
            # there is no *args
            if not self.vararg:
                result.append('*')
            result.append(_format_args(self.kwonlyargs, self.kw_defaults))
        return ', '.join(result)

    def default_value(self, argname):
        """return the default value for an argument

        :raise `NoDefault`: if there is no default value defined
        """
        i = _find_arg(argname, self.args)[0]
        if i is not None:
            # defaults align with the *last* len(defaults) positional args
            idx = i - (len(self.args) - len(self.defaults))
            if idx >= 0:
                return self.defaults[idx]
        i = _find_arg(argname, self.kwonlyargs)[0]
        if i is not None and self.kw_defaults[i] is not None:
            return self.kw_defaults[i]
        raise NoDefault()

    def is_argument(self, name):
        """return True if the name is defined in arguments"""
        if name == self.vararg:
            return True
        if name == self.kwarg:
            return True
        return self.find_argname(name, True)[1] is not None

    def find_argname(self, argname, rec=False):
        """return index and Name node with given name"""
        if self.args: # self.args may be None in some cases (builtin function)
            return _find_arg(argname, self.args, rec)
        return None, None

    def get_children(self):
        """override get_children to skip over None elements in kw_defaults"""
        for child in super(Arguments, self).get_children():
            if child is not None:
                yield child
|
||||
|
||||
|
||||
def _find_arg(argname, args, rec=False):
    """Return ``(index, node)`` of the argument named *argname* in *args*,
    or ``(None, None)`` when absent.

    When *rec* is true, tuple arguments (py2 nested-argument syntax) are
    searched recursively; the returned index then comes from the nested
    search.
    """
    for position, arg in enumerate(args):
        if isinstance(arg, Tuple):
            # only descend into tuple arguments on request
            if not rec:
                continue
            match = _find_arg(argname, arg.elts)
            if match[0] is not None:
                return match
        elif arg.name == argname:
            return position, arg
    return None, None
|
||||
|
||||
|
||||
def _format_args(args, defaults=None):
    """Render a list of argument nodes (with optional default-value nodes)
    as the comma-separated string used in a ``def`` signature.

    Defaults align with the trailing arguments; a ``None`` default is
    simply not rendered.
    """
    if args is None:
        return ''
    if defaults is not None:
        # defaults apply to the last len(defaults) arguments
        default_offset = len(args) - len(defaults)
    rendered = []
    for position, arg in enumerate(args):
        if isinstance(arg, Tuple):
            # py2 nested-argument syntax: format recursively
            chunk = '(%s)' % _format_args(arg.elts)
        else:
            chunk = arg.name
            if defaults is not None and position >= default_offset:
                default = defaults[position - default_offset]
                if default is not None:
                    chunk += '=' + default.as_string()
        rendered.append(chunk)
    return ', '.join(rendered)
|
||||
|
||||
|
||||
class AssAttr(NodeNG, ParentAssignTypeMixin):
    """class representing an AssAttr node"""
    # child-node slots; None until populated
    _astroid_fields = ('expr',)
    expr = None
|
||||
|
||||
class Assert(Statement):
    """class representing an Assert node"""
    # child-node slots; None until populated
    _astroid_fields = ('test', 'fail',)
    test = None
    fail = None
|
||||
|
||||
class Assign(Statement, AssignTypeMixin):
    """class representing an Assign node"""
    # child-node slots; None until populated
    _astroid_fields = ('targets', 'value',)
    targets = None
    value = None
|
||||
|
||||
class AugAssign(Statement, AssignTypeMixin):
    """class representing an AugAssign node"""
    # child-node slots; None until populated
    _astroid_fields = ('target', 'value',)
    target = None
    value = None
|
||||
|
||||
class Backquote(NodeNG):
    """class representing a Backquote node (py2 ``repr`` backticks)"""
    # child-node slots; None until populated
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
class BinOp(NodeNG):
    """class representing a BinOp node"""
    # child-node slots; None until populated
    _astroid_fields = ('left', 'right',)
    left = None
    right = None
|
||||
|
||||
class BoolOp(NodeNG):
    """class representing a BoolOp node"""
    # child-node slots; None until populated
    _astroid_fields = ('values',)
    values = None
|
||||
|
||||
class Break(Statement):
    """class representing a Break node"""
|
||||
|
||||
|
||||
class CallFunc(NodeNG):
    """class representing a CallFunc node"""
    # child-node slots; None until populated
    _astroid_fields = ('func', 'args', 'starargs', 'kwargs')
    func = None
    args = None
    starargs = None
    kwargs = None

    def __init__(self):
        # explicitly reset per-instance so each call node gets its own
        # starargs/kwargs slots
        self.starargs = None
        self.kwargs = None
|
||||
|
||||
class Compare(NodeNG):
    """class representing a Compare node"""
    # 'ops' is iterated as (op, comparator) pairs (see get_children)
    _astroid_fields = ('left', 'ops',)
    left = None
    ops = None

    def get_children(self):
        """override get_children for tuple fields"""
        yield self.left
        for _, comparator in self.ops:
            yield comparator # we don't want the 'op'

    def last_child(self):
        """override last_child"""
        # XXX maybe if self.ops:
        return self.ops[-1][1]
        #return self.left
|
||||
|
||||
class Comprehension(NodeNG):
    """class representing a Comprehension node"""
    # child-node slots; None until populated
    _astroid_fields = ('target', 'iter', 'ifs')
    target = None
    iter = None
    ifs = None

    # the comprehension variable may never be bound (empty iterable), so
    # its assignment is 'optional' for the statement-filtering machinery
    optional_assign = True
    def ass_type(self):
        return self

    def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt):
        """method used in filter_stmts"""
        if self is mystmt:
            if isinstance(lookup_node, (Const, Name)):
                return [lookup_node], True

        elif self.statement() is mystmt:
            # original node's statement is the assignment, only keeps
            # current node (gen exp, list comp)

            return [node], True

        return stmts, False
|
||||
|
||||
|
||||
class Const(NodeNG, Instance):
    """represent a constant node like num, str, bool, None, bytes"""

    def __init__(self, value=None):
        # the wrapped python object (int, str, None, ...)
        self.value = value

    def getitem(self, index, context=None):
        # indexing is only supported for string constants
        if isinstance(self.value, six.string_types):
            return Const(self.value[index])
        raise TypeError('%r (value=%s)' % (self, self.value))

    def has_dynamic_getattr(self):
        return False

    def itered(self):
        # iteration is only supported for string constants
        if isinstance(self.value, six.string_types):
            return self.value
        raise TypeError()

    def pytype(self):
        # qualified name of the proxied python type
        return self._proxied.qname()
|
||||
|
||||
|
||||
class Continue(Statement):
    """class representing a Continue node"""
|
||||
|
||||
|
||||
class Decorators(NodeNG):
    """class representing a Decorators node"""
    # child-node slots; None until populated
    _astroid_fields = ('nodes',)
    nodes = None

    def __init__(self, nodes=None):
        self.nodes = nodes

    def scope(self):
        """return the scope decorators are evaluated in"""
        # skip the function node to go directly to the upper level scope
        return self.parent.parent.scope()
|
||||
|
||||
class DelAttr(NodeNG, ParentAssignTypeMixin):
    """class representing a DelAttr node"""
    # child-node slots; None until populated
    _astroid_fields = ('expr',)
    expr = None
|
||||
|
||||
|
||||
class Delete(Statement, AssignTypeMixin):
    """class representing a Delete node"""
    # child-node slots; None until populated
    _astroid_fields = ('targets',)
    targets = None
|
||||
|
||||
|
||||
class Dict(NodeNG, Instance):
    """class representing a Dict node"""
    _astroid_fields = ('items',)

    def __init__(self, items=None):
        # items is stored as a list of (key node, value node) tuples;
        # a plain python dict gets its pairs wrapped via const_factory
        if items is None:
            self.items = []
        else:
            self.items = [(const_factory(k), const_factory(v))
                          for k, v in items.items()]

    def pytype(self):
        return '%s.dict' % BUILTINS

    def get_children(self):
        """get children of a Dict node"""
        # overrides get_children
        for key, value in self.items:
            yield key
            yield value

    def last_child(self):
        """override last_child"""
        if self.items:
            return self.items[-1][1]
        return None

    def itered(self):
        """return the nodes produced when iterating this dict (its keys)"""
        # BUG FIX: self.items is a list of (key, value) tuples (see
        # __init__ / get_children), so the former ``self.items[::2]``
        # returned every other *pair* instead of the keys.  Iterating a
        # dict yields its keys, so return the key node of each pair.
        return [key for (key, _) in self.items]

    def getitem(self, lookup_key, context=None):
        """return the value node whose key infers to *lookup_key*"""
        for key, value in self.items:
            for inferedkey in key.infer(context):
                if inferedkey is YES:
                    continue
                if isinstance(inferedkey, Const) \
                        and inferedkey.value == lookup_key:
                    return value
        # This should raise KeyError, but all call sites only catch
        # IndexError. Let's leave it like that for now.
        raise IndexError(lookup_key)
|
||||
|
||||
|
||||
class Discard(Statement):
    """class representing a Discard node (an expression statement)"""
    # child-node slots; None until populated
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
|
||||
class Ellipsis(NodeNG): # pylint: disable=redefined-builtin
    """class representing an Ellipsis node

    NOTE: intentionally shadows the ``Ellipsis`` builtin within this module.
    """
|
||||
|
||||
|
||||
class EmptyNode(NodeNG):
    """class representing an EmptyNode node"""
|
||||
|
||||
|
||||
class ExceptHandler(Statement, AssignTypeMixin):
    """class representing an ExceptHandler node"""
    # child-node slots; None until populated
    _astroid_fields = ('type', 'name', 'body',)
    type = None
    name = None
    body = None

    @cachedproperty
    def blockstart_tolineno(self):
        # the header ends at the bound name, else at the exception type,
        # else (bare except) at the 'except' line itself
        if self.name:
            return self.name.tolineno
        elif self.type:
            return self.type.tolineno
        else:
            return self.lineno

    def catch(self, exceptions):
        """return a true value when this handler may catch one of the
        exception names in *exceptions* (a bare handler or an unspecified
        exception list always may).  Falls through to an implicit None
        (falsy) when no name matches."""
        if self.type is None or exceptions is None:
            return True
        for node in self.type.nodes_of_class(Name):
            if node.name in exceptions:
                return True
|
||||
|
||||
|
||||
class Exec(Statement):
    """class representing an Exec node"""
    # child-node slots; None until populated
    _astroid_fields = ('expr', 'globals', 'locals',)
    expr = None
    globals = None
    locals = None
|
||||
|
||||
|
||||
class ExtSlice(NodeNG):
    """class representing an ExtSlice node"""
    # child-node slots; None until populated
    _astroid_fields = ('dims',)
    dims = None
|
||||
|
||||
class For(BlockRangeMixIn, AssignTypeMixin, Statement):
    """class representing a For node"""
    # child-node slots; None until populated
    _astroid_fields = ('target', 'iter', 'body', 'orelse',)
    target = None
    iter = None
    body = None
    orelse = None

    # the loop variable may never be bound (empty iterable), so its
    # assignment is 'optional' for the statement-filtering machinery
    optional_assign = True
    @cachedproperty
    def blockstart_tolineno(self):
        # the 'for' header ends where the iterable expression ends
        return self.iter.tolineno
|
||||
|
||||
|
||||
class From(FromImportMixIn, Statement):
    """class representing a From node (``from <modname> import <names>``)"""

    def __init__(self, fromname, names, level=0):
        self.modname = fromname
        self.names = names
        # number of leading dots for explicit relative imports
        self.level = level
|
||||
|
||||
class Getattr(NodeNG):
    """class representing a Getattr node"""
    # child-node slots; None until populated
    _astroid_fields = ('expr',)
    expr = None
|
||||
|
||||
|
||||
class Global(Statement):
    """class representing a Global node"""

    def __init__(self, names):
        # plain list of declared names (strings), not child nodes
        self.names = names

    def _infer_name(self, frame, name):
        return name
|
||||
|
||||
|
||||
class If(BlockRangeMixIn, Statement):
    """class representing an If node"""
    # child-node slots; None until populated
    _astroid_fields = ('test', 'body', 'orelse')
    test = None
    body = None
    orelse = None

    @cachedproperty
    def blockstart_tolineno(self):
        # the 'if' header ends where the test expression ends
        return self.test.tolineno

    def block_range(self, lineno):
        """handle block line numbers range for if statements"""
        if lineno == self.body[0].fromlineno:
            return lineno, lineno
        if lineno <= self.body[-1].tolineno:
            return lineno, self.body[-1].tolineno
        # lineno falls in the else/elif branch
        return self._elsed_block_range(lineno, self.orelse,
                                       self.body[0].fromlineno - 1)
|
||||
|
||||
|
||||
class IfExp(NodeNG):
    """class representing an IfExp node (``a if test else b``)"""
    # child-node slots; None until populated
    _astroid_fields = ('test', 'body', 'orelse')
    test = None
    body = None
    orelse = None
|
||||
|
||||
|
||||
class Import(FromImportMixIn, Statement):
    """class representing an Import node"""
|
||||
|
||||
|
||||
class Index(NodeNG):
    """class representing an Index node"""
    # child-node slots; None until populated
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
|
||||
class Keyword(NodeNG):
    """class representing a Keyword node"""
    # child-node slots; None until populated
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
|
||||
class List(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a List node"""
    _astroid_fields = ('elts',)

    def __init__(self, elts=None):
        # a plain python sequence gets its elements wrapped via const_factory
        if elts is None:
            self.elts = []
        else:
            self.elts = [const_factory(e) for e in elts]

    def pytype(self):
        return '%s.list' % BUILTINS

    def getitem(self, index, context=None):
        return self.elts[index]

    def itered(self):
        return self.elts
|
||||
|
||||
|
||||
class Nonlocal(Statement):
    """class representing a Nonlocal node"""

    def __init__(self, names):
        # plain list of declared names (strings), not child nodes
        self.names = names

    def _infer_name(self, frame, name):
        return name
|
||||
|
||||
|
||||
class Pass(Statement):
    """class representing a Pass node"""
|
||||
|
||||
|
||||
class Print(Statement):
    """class representing a Print node"""
    # child-node slots; None until populated
    _astroid_fields = ('dest', 'values',)
    dest = None
    values = None
|
||||
|
||||
|
||||
class Raise(Statement):
    """class representing a Raise node"""
    # NOTE: 'exc' is also (redundantly) re-declared inside the py3 branch
    # below; harmless, kept as-is
    exc = None
    if sys.version_info < (3, 0):
        # py2 three-expression raise: raise exc, inst, tback
        _astroid_fields = ('exc', 'inst', 'tback')
        inst = None
        tback = None
    else:
        # py3: raise exc from cause
        _astroid_fields = ('exc', 'cause')
        exc = None
        cause = None

    def raises_not_implemented(self):
        """return a true value when this statement raises
        NotImplementedError (implicit None otherwise)"""
        if not self.exc:
            return
        for name in self.exc.nodes_of_class(Name):
            if name.name == 'NotImplementedError':
                return True
|
||||
|
||||
|
||||
class Return(Statement):
    """class representing a Return node"""
    # child-node slots; None until populated
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
|
||||
class Set(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a Set node"""
    _astroid_fields = ('elts',)

    def __init__(self, elts=None):
        # a plain python sequence gets its elements wrapped via const_factory
        if elts is None:
            self.elts = []
        else:
            self.elts = [const_factory(e) for e in elts]

    def pytype(self):
        return '%s.set' % BUILTINS

    def itered(self):
        return self.elts
|
||||
|
||||
|
||||
class Slice(NodeNG):
    """class representing a Slice node"""
    # child-node slots; None until populated
    _astroid_fields = ('lower', 'upper', 'step')
    lower = None
    upper = None
    step = None
|
||||
|
||||
class Starred(NodeNG, ParentAssignTypeMixin):
    """class representing a Starred node"""
    # child-node slots; None until populated
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
|
||||
class Subscript(NodeNG):
    """class representing a Subscript node"""
    # child-node slots; None until populated
    _astroid_fields = ('value', 'slice')
    value = None
    slice = None
|
||||
|
||||
|
||||
class TryExcept(BlockRangeMixIn, Statement):
    """class representing a TryExcept node"""
    # child-node slots; None until populated
    _astroid_fields = ('body', 'handlers', 'orelse',)
    body = None
    handlers = None
    orelse = None

    def _infer_name(self, frame, name):
        return name

    def block_range(self, lineno):
        """handle block line numbers range for try/except statements"""
        # 'last' remembers the line just before the first handler's body,
        # used as the end of the try block when delegating below
        last = None
        for exhandler in self.handlers:
            if exhandler.type and lineno == exhandler.type.fromlineno:
                return lineno, lineno
            if exhandler.body[0].fromlineno <= lineno <= exhandler.body[-1].tolineno:
                return lineno, exhandler.body[-1].tolineno
            if last is None:
                last = exhandler.body[0].fromlineno - 1
        return self._elsed_block_range(lineno, self.orelse, last)
|
||||
|
||||
|
||||
class TryFinally(BlockRangeMixIn, Statement):
    """class representing a TryFinally node"""
    # child-node slots; None until populated
    _astroid_fields = ('body', 'finalbody',)
    body = None
    finalbody = None

    def block_range(self, lineno):
        """handle block line numbers range for try/finally statements"""
        child = self.body[0]
        # py2.5 try: except: finally:
        # (a combined statement is parsed as a TryExcept nested in the
        # TryFinally, sharing the same starting line)
        if (isinstance(child, TryExcept) and child.fromlineno == self.fromlineno
                and lineno > self.fromlineno and lineno <= child.tolineno):
            return child.block_range(lineno)
        return self._elsed_block_range(lineno, self.finalbody)
|
||||
|
||||
|
||||
class Tuple(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a Tuple node"""
    _astroid_fields = ('elts',)

    def __init__(self, elts=None):
        # a plain python sequence gets its elements wrapped via const_factory
        if elts is None:
            self.elts = []
        else:
            self.elts = [const_factory(e) for e in elts]

    def pytype(self):
        return '%s.tuple' % BUILTINS

    def getitem(self, index, context=None):
        return self.elts[index]

    def itered(self):
        return self.elts
|
||||
|
||||
|
||||
class UnaryOp(NodeNG):
    """class representing an UnaryOp node"""
    # child-node slots; None until populated
    _astroid_fields = ('operand',)
    operand = None
|
||||
|
||||
|
||||
class While(BlockRangeMixIn, Statement):
    """class representing a While node"""
    # child-node slots; None until populated
    _astroid_fields = ('test', 'body', 'orelse',)
    test = None
    body = None
    orelse = None

    @cachedproperty
    def blockstart_tolineno(self):
        # the 'while' header ends where the test expression ends
        return self.test.tolineno

    def block_range(self, lineno):
        """handle block line numbers range for for and while statements"""
        return self._elsed_block_range(lineno, self.orelse)
|
||||
|
||||
|
||||
class With(BlockRangeMixIn, AssignTypeMixin, Statement):
    """class representing a With node"""
    # 'items' is iterated as (context-expr, optional-vars) pairs
    # (see get_children); None until populated
    _astroid_fields = ('items', 'body')
    items = None
    body = None

    @cachedproperty
    def blockstart_tolineno(self):
        # the header ends at the last context-manager expression
        return self.items[-1][0].tolineno

    def get_children(self):
        for expr, var in self.items:
            yield expr
            if var:
                # 'as' target may be absent
                yield var
        for elt in self.body:
            yield elt
|
||||
|
||||
class Yield(NodeNG):
    """class representing a Yield node"""
    # child-node slots; None until populated
    _astroid_fields = ('value',)
    value = None
|
||||
|
||||
class YieldFrom(Yield):
    """ Class representing a YieldFrom node. """
|
||||
|
||||
# constants ##############################################################
|
||||
|
||||
# mapping from python type to the astroid node class wrapping a literal of
# that type; extended with the scalar types by _update_const_classes() below
CONST_CLS = {
    list: List,
    tuple: Tuple,
    dict: Dict,
    set: Set,
    type(None): Const,
    }
|
||||
|
||||
def _update_const_classes():
    """update constant classes, so the keys of CONST_CLS can be reused"""
    klasses = [bool, int, float, complex, str]
    if sys.version_info < (3, 0):
        # py2-only scalar types
        klasses += [unicode, long]
    if sys.version_info >= (2, 6):
        klasses.append(bytes)
    CONST_CLS.update((kls, Const) for kls in klasses)
_update_const_classes()
|
||||
|
||||
def const_factory(value):
    """return an astroid node for a python value"""
    # XXX we should probably be stricter here and only consider stuff in
    # CONST_CLS or do better treatment: in case where value is not in CONST_CLS,
    # we should rather recall the builder on this value than returning an empty
    # node (another option being that const_factory shouldn't be called with something
    # not in CONST_CLS)
    assert not isinstance(value, NodeNG)
    try:
        # the try deliberately covers both the class lookup and the
        # construction call: any failure falls back to an EmptyNode proxy
        return CONST_CLS[value.__class__](value)
    except (KeyError, AttributeError):
        node = EmptyNode()
        node.object = value
        return node
|
||||
|
|
@ -1,74 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
on all nodes :
|
||||
.is_statement, returning true if the node should be considered as a
|
||||
statement node
|
||||
.root(), returning the root node of the tree (i.e. a Module)
|
||||
.previous_sibling(), returning previous sibling statement node
|
||||
.next_sibling(), returning next sibling statement node
|
||||
.statement(), returning the first parent node marked as statement node
|
||||
.frame(), returning the first node defining a new local scope (i.e.
|
||||
Module, Function or Class)
|
||||
.set_local(name, node), define an identifier <name> on the first parent frame,
|
||||
with the node defining it. This is used by the astroid builder and should not
|
||||
be used from out there.
|
||||
|
||||
on From and Import :
|
||||
.real_name(name),
|
||||
|
||||
|
||||
"""
|
||||
# pylint: disable=unused-import
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
from astroid.node_classes import Arguments, AssAttr, Assert, Assign, \
|
||||
AssName, AugAssign, Backquote, BinOp, BoolOp, Break, CallFunc, Compare, \
|
||||
Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, \
|
||||
Dict, Discard, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, \
|
||||
From, Getattr, Global, If, IfExp, Import, Index, Keyword, \
|
||||
List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, \
|
||||
TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, \
|
||||
const_factory
|
||||
from astroid.scoped_nodes import Module, GenExpr, Lambda, DictComp, \
|
||||
ListComp, SetComp, Function, Class
|
||||
|
||||
# every concrete astroid node class, re-exported as one flat tuple
# (roughly alphabetical)
ALL_NODE_CLASSES = (
    Arguments, AssAttr, Assert, Assign, AssName, AugAssign,
    Backquote, BinOp, BoolOp, Break,
    CallFunc, Class, Compare, Comprehension, Const, Continue,
    Decorators, DelAttr, DelName, Delete,
    Dict, DictComp, Discard,
    Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice,
    For, From, Function,
    Getattr, GenExpr, Global,
    If, IfExp, Import, Index,
    Keyword,
    Lambda, List, ListComp,
    Name, Nonlocal,
    Module,
    Pass, Print,
    Raise, Return,
    Set, SetComp, Slice, Starred, Subscript,
    TryExcept, TryFinally, Tuple,
    UnaryOp,
    While, With,
    Yield, YieldFrom
    )
|
||||
|
||||
|
|
@ -1,415 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains a set of functions to handle python protocols for nodes
|
||||
where it makes sense.
|
||||
"""
|
||||
|
||||
__doctype__ = "restructuredtext en"
|
||||
import collections
|
||||
|
||||
from astroid.exceptions import InferenceError, NoDefault, NotFoundError
|
||||
from astroid.node_classes import unpack_infer
|
||||
from astroid.bases import InferenceContext, copy_context, \
|
||||
raise_if_nothing_infered, yes_if_nothing_infered, Instance, YES
|
||||
from astroid.nodes import const_factory
|
||||
from astroid import nodes
|
||||
|
||||
# mapping from binary operator symbol to the special method implementing it
# ('/' maps to the py2 __div__; true division uses __truediv__)
BIN_OP_METHOD = {'+': '__add__',
                 '-': '__sub__',
                 '/': '__div__',
                 '//': '__floordiv__',
                 '*': '__mul__',
                 # BUG FIX: the special method for ** is __pow__;
                 # '__power__' does not exist, so ** on instances could
                 # never resolve to the implementing method
                 '**': '__pow__',
                 '%': '__mod__',
                 '&': '__and__',
                 '|': '__or__',
                 '^': '__xor__',
                 '<<': '__lshift__',
                 '>>': '__rshift__',
                }
|
||||
|
||||
# mapping from unary operator symbol to the special method implementing it
UNARY_OP_METHOD = {'+': '__pos__',
                   '-': '__neg__',
                   '~': '__invert__',
                   'not': None, # XXX not '__nonzero__'
                  }
|
||||
|
||||
# unary operations ############################################################
|
||||
|
||||
def tl_infer_unary_op(self, operator):
    """infer a unary operation on a tuple/list node; only 'not'
    (emptiness test) is supported"""
    if operator != 'not':
        raise TypeError() # XXX log unsupported operation
    return const_factory(not bool(self.elts))
nodes.Tuple.infer_unary_op = tl_infer_unary_op
nodes.List.infer_unary_op = tl_infer_unary_op
|
||||
|
||||
|
||||
def dict_infer_unary_op(self, operator):
    """infer a unary operation on a dict node; only 'not'
    (emptiness test) is supported"""
    if operator != 'not':
        raise TypeError() # XXX log unsupported operation
    return const_factory(not bool(self.items))
nodes.Dict.infer_unary_op = dict_infer_unary_op
|
||||
|
||||
|
||||
def const_infer_unary_op(self, operator):
    """infer a unary operation applied to a constant node"""
    if operator == 'not':
        result = not self.value
    elif operator == '+':
        # XXX log potentially raised TypeError
        result = +self.value
    else: # operator == '-':
        result = -self.value
    return const_factory(result)
nodes.Const.infer_unary_op = const_infer_unary_op
|
||||
|
||||
|
||||
# binary operations ###########################################################
|
||||
|
||||
# concrete implementation of each binary operator, used to fold two
# constant operands into their result
BIN_OP_IMPL = {
    '+': lambda a, b: a + b,
    '-': lambda a, b: a - b,
    '/': lambda a, b: a / b,
    '//': lambda a, b: a // b,
    '*': lambda a, b: a * b,
    '**': lambda a, b: a ** b,
    '%': lambda a, b: a % b,
    '&': lambda a, b: a & b,
    '|': lambda a, b: a | b,
    '^': lambda a, b: a ^ b,
    '<<': lambda a, b: a << b,
    '>>': lambda a, b: a >> b,
    }
# every augmented operator ('+=', '-=', ...) reuses the plain operator's
# implementation
for _op in list(BIN_OP_IMPL):
    BIN_OP_IMPL[_op + '='] = BIN_OP_IMPL[_op]
|
||||
|
||||
def const_infer_binary_op(self, operator, other, context):
|
||||
for other in other.infer(context):
|
||||
if isinstance(other, nodes.Const):
|
||||
try:
|
||||
impl = BIN_OP_IMPL[operator]
|
||||
|
||||
try:
|
||||
yield const_factory(impl(self.value, other.value))
|
||||
except Exception:
|
||||
# ArithmeticError is not enough: float >> float is a TypeError
|
||||
# TODO : let pylint know about the problem
|
||||
pass
|
||||
except TypeError:
|
||||
# XXX log TypeError
|
||||
continue
|
||||
elif other is YES:
|
||||
yield other
|
||||
else:
|
||||
try:
|
||||
for val in other.infer_binary_op(operator, self, context):
|
||||
yield val
|
||||
except AttributeError:
|
||||
yield YES
|
||||
nodes.Const.infer_binary_op = yes_if_nothing_infered(const_infer_binary_op)
|
||||
|
||||
|
||||
def tl_infer_binary_op(self, operator, other, context):
|
||||
for other in other.infer(context):
|
||||
if isinstance(other, self.__class__) and operator == '+':
|
||||
node = self.__class__()
|
||||
elts = [n for elt in self.elts for n in elt.infer(context)
|
||||
if not n is YES]
|
||||
elts += [n for elt in other.elts for n in elt.infer(context)
|
||||
if not n is YES]
|
||||
node.elts = elts
|
||||
yield node
|
||||
elif isinstance(other, nodes.Const) and operator == '*':
|
||||
if not isinstance(other.value, int):
|
||||
yield YES
|
||||
continue
|
||||
node = self.__class__()
|
||||
elts = [n for elt in self.elts for n in elt.infer(context)
|
||||
if not n is YES] * other.value
|
||||
node.elts = elts
|
||||
yield node
|
||||
elif isinstance(other, Instance) and not isinstance(other, nodes.Const):
|
||||
yield YES
|
||||
# XXX else log TypeError
|
||||
nodes.Tuple.infer_binary_op = yes_if_nothing_infered(tl_infer_binary_op)
|
||||
nodes.List.infer_binary_op = yes_if_nothing_infered(tl_infer_binary_op)
|
||||
|
||||
|
||||
def dict_infer_binary_op(self, operator, other, context):
|
||||
for other in other.infer(context):
|
||||
if isinstance(other, Instance) and isinstance(other._proxied, nodes.Class):
|
||||
yield YES
|
||||
# XXX else log TypeError
|
||||
nodes.Dict.infer_binary_op = yes_if_nothing_infered(dict_infer_binary_op)
|
||||
|
||||
def instance_infer_binary_op(self, operator, other, context):
|
||||
try:
|
||||
methods = self.getattr(BIN_OP_METHOD[operator])
|
||||
except (NotFoundError, KeyError):
|
||||
# Unknown operator
|
||||
yield YES
|
||||
else:
|
||||
for method in methods:
|
||||
if not isinstance(method, nodes.Function):
|
||||
continue
|
||||
for result in method.infer_call_result(self, context):
|
||||
if result is not YES:
|
||||
yield result
|
||||
# We are interested only in the first infered method,
|
||||
# don't go looking in the rest of the methods of the ancestors.
|
||||
break
|
||||
|
||||
Instance.infer_binary_op = yes_if_nothing_infered(instance_infer_binary_op)
|
||||
|
||||
|
||||
# assignment ##################################################################
|
||||
|
||||
"""the assigned_stmts method is responsible to return the assigned statement
|
||||
(e.g. not inferred) according to the assignment type.
|
||||
|
||||
The `asspath` argument is used to record the lhs path of the original node.
|
||||
For instance if we want assigned statements for 'c' in 'a, (b,c)', asspath
|
||||
will be [1, 1] once arrived to the Assign node.
|
||||
|
||||
The `context` argument is the current inference context which should be given
|
||||
to any intermediary inference necessary.
|
||||
"""
|
||||
|
||||
def _resolve_looppart(parts, asspath, context):
|
||||
"""recursive function to resolve multiple assignments on loops"""
|
||||
asspath = asspath[:]
|
||||
index = asspath.pop(0)
|
||||
for part in parts:
|
||||
if part is YES:
|
||||
continue
|
||||
# XXX handle __iter__ and log potentially detected errors
|
||||
if not hasattr(part, 'itered'):
|
||||
continue
|
||||
try:
|
||||
itered = part.itered()
|
||||
except TypeError:
|
||||
continue # XXX log error
|
||||
for stmt in itered:
|
||||
try:
|
||||
assigned = stmt.getitem(index, context)
|
||||
except (AttributeError, IndexError):
|
||||
continue
|
||||
except TypeError: # stmt is unsubscriptable Const
|
||||
continue
|
||||
if not asspath:
|
||||
# we achieved to resolved the assignment path,
|
||||
# don't infer the last part
|
||||
yield assigned
|
||||
elif assigned is YES:
|
||||
break
|
||||
else:
|
||||
# we are not yet on the last part of the path
|
||||
# search on each possibly inferred value
|
||||
try:
|
||||
for infered in _resolve_looppart(assigned.infer(context),
|
||||
asspath, context):
|
||||
yield infered
|
||||
except InferenceError:
|
||||
break
|
||||
|
||||
|
||||
def for_assigned_stmts(self, node, context=None, asspath=None):
|
||||
if asspath is None:
|
||||
for lst in self.iter.infer(context):
|
||||
if isinstance(lst, (nodes.Tuple, nodes.List)):
|
||||
for item in lst.elts:
|
||||
yield item
|
||||
else:
|
||||
for infered in _resolve_looppart(self.iter.infer(context),
|
||||
asspath, context):
|
||||
yield infered
|
||||
|
||||
nodes.For.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts)
|
||||
nodes.Comprehension.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts)
|
||||
|
||||
|
||||
def mulass_assigned_stmts(self, node, context=None, asspath=None):
|
||||
if asspath is None:
|
||||
asspath = []
|
||||
asspath.insert(0, self.elts.index(node))
|
||||
return self.parent.assigned_stmts(self, context, asspath)
|
||||
nodes.Tuple.assigned_stmts = mulass_assigned_stmts
|
||||
nodes.List.assigned_stmts = mulass_assigned_stmts
|
||||
|
||||
|
||||
def assend_assigned_stmts(self, context=None):
|
||||
return self.parent.assigned_stmts(self, context=context)
|
||||
nodes.AssName.assigned_stmts = assend_assigned_stmts
|
||||
nodes.AssAttr.assigned_stmts = assend_assigned_stmts
|
||||
|
||||
|
||||
def _arguments_infer_argname(self, name, context):
|
||||
# arguments information may be missing, in which case we can't do anything
|
||||
# more
|
||||
if not (self.args or self.vararg or self.kwarg):
|
||||
yield YES
|
||||
return
|
||||
# first argument of instance/class method
|
||||
if self.args and getattr(self.args[0], 'name', None) == name:
|
||||
functype = self.parent.type
|
||||
if functype == 'method':
|
||||
yield Instance(self.parent.parent.frame())
|
||||
return
|
||||
if functype == 'classmethod':
|
||||
yield self.parent.parent.frame()
|
||||
return
|
||||
if name == self.vararg:
|
||||
vararg = const_factory(())
|
||||
vararg.parent = self
|
||||
yield vararg
|
||||
return
|
||||
if name == self.kwarg:
|
||||
kwarg = const_factory({})
|
||||
kwarg.parent = self
|
||||
yield kwarg
|
||||
return
|
||||
# if there is a default value, yield it. And then yield YES to reflect
|
||||
# we can't guess given argument value
|
||||
try:
|
||||
context = copy_context(context)
|
||||
for infered in self.default_value(name).infer(context):
|
||||
yield infered
|
||||
yield YES
|
||||
except NoDefault:
|
||||
yield YES
|
||||
|
||||
|
||||
def arguments_assigned_stmts(self, node, context, asspath=None):
|
||||
if context.callcontext:
|
||||
# reset call context/name
|
||||
callcontext = context.callcontext
|
||||
context = copy_context(context)
|
||||
context.callcontext = None
|
||||
return callcontext.infer_argument(self.parent, node.name, context)
|
||||
return _arguments_infer_argname(self, node.name, context)
|
||||
nodes.Arguments.assigned_stmts = arguments_assigned_stmts
|
||||
|
||||
|
||||
def assign_assigned_stmts(self, node, context=None, asspath=None):
|
||||
if not asspath:
|
||||
yield self.value
|
||||
return
|
||||
for infered in _resolve_asspart(self.value.infer(context), asspath, context):
|
||||
yield infered
|
||||
nodes.Assign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts)
|
||||
nodes.AugAssign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts)
|
||||
|
||||
|
||||
def _resolve_asspart(parts, asspath, context):
|
||||
"""recursive function to resolve multiple assignments"""
|
||||
asspath = asspath[:]
|
||||
index = asspath.pop(0)
|
||||
for part in parts:
|
||||
if hasattr(part, 'getitem'):
|
||||
try:
|
||||
assigned = part.getitem(index, context)
|
||||
# XXX raise a specific exception to avoid potential hiding of
|
||||
# unexpected exception ?
|
||||
except (TypeError, IndexError):
|
||||
return
|
||||
if not asspath:
|
||||
# we achieved to resolved the assignment path, don't infer the
|
||||
# last part
|
||||
yield assigned
|
||||
elif assigned is YES:
|
||||
return
|
||||
else:
|
||||
# we are not yet on the last part of the path search on each
|
||||
# possibly inferred value
|
||||
try:
|
||||
for infered in _resolve_asspart(assigned.infer(context),
|
||||
asspath, context):
|
||||
yield infered
|
||||
except InferenceError:
|
||||
return
|
||||
|
||||
|
||||
def excepthandler_assigned_stmts(self, node, context=None, asspath=None):
|
||||
for assigned in unpack_infer(self.type):
|
||||
if isinstance(assigned, nodes.Class):
|
||||
assigned = Instance(assigned)
|
||||
yield assigned
|
||||
nodes.ExceptHandler.assigned_stmts = raise_if_nothing_infered(excepthandler_assigned_stmts)
|
||||
|
||||
|
||||
def with_assigned_stmts(self, node, context=None, asspath=None):
|
||||
if asspath is None:
|
||||
for _, vars in self.items:
|
||||
if vars is None:
|
||||
continue
|
||||
for lst in vars.infer(context):
|
||||
if isinstance(lst, (nodes.Tuple, nodes.List)):
|
||||
for item in lst.nodes:
|
||||
yield item
|
||||
nodes.With.assigned_stmts = raise_if_nothing_infered(with_assigned_stmts)
|
||||
|
||||
|
||||
def starred_assigned_stmts(self, node=None, context=None, asspath=None):
|
||||
stmt = self.statement()
|
||||
if not isinstance(stmt, (nodes.Assign, nodes.For)):
|
||||
raise InferenceError()
|
||||
|
||||
if isinstance(stmt, nodes.Assign):
|
||||
value = stmt.value
|
||||
lhs = stmt.targets[0]
|
||||
|
||||
if sum(1 for node in lhs.nodes_of_class(nodes.Starred)) > 1:
|
||||
# Too many starred arguments in the expression.
|
||||
raise InferenceError()
|
||||
|
||||
if context is None:
|
||||
context = InferenceContext()
|
||||
try:
|
||||
rhs = next(value.infer(context))
|
||||
except InferenceError:
|
||||
yield YES
|
||||
return
|
||||
if rhs is YES or not hasattr(rhs, 'elts'):
|
||||
# Not interested in inferred values without elts.
|
||||
yield YES
|
||||
return
|
||||
|
||||
elts = collections.deque(rhs.elts[:])
|
||||
if len(lhs.elts) > len(rhs.elts):
|
||||
# a, *b, c = (1, 2)
|
||||
raise InferenceError()
|
||||
|
||||
# Unpack iteratively the values from the rhs of the assignment,
|
||||
# until the find the starred node. What will remain will
|
||||
# be the list of values which the Starred node will represent
|
||||
# This is done in two steps, from left to right to remove
|
||||
# anything before the starred node and from right to left
|
||||
# to remvoe anything after the starred node.
|
||||
|
||||
for index, node in enumerate(lhs.elts):
|
||||
if not isinstance(node, nodes.Starred):
|
||||
elts.popleft()
|
||||
continue
|
||||
lhs_elts = collections.deque(reversed(lhs.elts[index:]))
|
||||
for node in lhs_elts:
|
||||
if not isinstance(node, nodes.Starred):
|
||||
elts.pop()
|
||||
continue
|
||||
# We're done
|
||||
for elt in elts:
|
||||
yield elt
|
||||
break
|
||||
|
||||
nodes.Starred.assigned_stmts = starred_assigned_stmts
|
||||
|
|
@ -1,366 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains a set of functions to create astroid trees from scratch
|
||||
(build_* functions) or from living object (object_build_* functions)
|
||||
"""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
from os.path import abspath
|
||||
from inspect import (getargspec, isdatadescriptor, isfunction, ismethod,
|
||||
ismethoddescriptor, isclass, isbuiltin, ismodule)
|
||||
import six
|
||||
|
||||
from astroid.node_classes import CONST_CLS
|
||||
from astroid.nodes import (Module, Class, Const, const_factory, From,
|
||||
Function, EmptyNode, Name, Arguments)
|
||||
from astroid.bases import BUILTINS, Generator
|
||||
from astroid.manager import AstroidManager
|
||||
MANAGER = AstroidManager()
|
||||
|
||||
_CONSTANTS = tuple(CONST_CLS) # the keys of CONST_CLS eg python builtin types
|
||||
|
||||
def _io_discrepancy(member):
|
||||
# _io module names itself `io`: http://bugs.python.org/issue18602
|
||||
member_self = getattr(member, '__self__', None)
|
||||
return (member_self and
|
||||
ismodule(member_self) and
|
||||
member_self.__name__ == '_io' and
|
||||
member.__module__ == 'io')
|
||||
|
||||
def _attach_local_node(parent, node, name):
|
||||
node.name = name # needed by add_local_node
|
||||
parent.add_local_node(node)
|
||||
|
||||
_marker = object()
|
||||
|
||||
def attach_dummy_node(node, name, object=_marker):
|
||||
"""create a dummy node and register it in the locals of the given
|
||||
node with the specified name
|
||||
"""
|
||||
enode = EmptyNode()
|
||||
enode.object = object
|
||||
_attach_local_node(node, enode, name)
|
||||
|
||||
def _has_underlying_object(self):
|
||||
return hasattr(self, 'object') and self.object is not _marker
|
||||
|
||||
EmptyNode.has_underlying_object = _has_underlying_object
|
||||
|
||||
def attach_const_node(node, name, value):
|
||||
"""create a Const node and register it in the locals of the given
|
||||
node with the specified name
|
||||
"""
|
||||
if not name in node.special_attributes:
|
||||
_attach_local_node(node, const_factory(value), name)
|
||||
|
||||
def attach_import_node(node, modname, membername):
|
||||
"""create a From node and register it in the locals of the given
|
||||
node with the specified name
|
||||
"""
|
||||
from_node = From(modname, [(membername, None)])
|
||||
_attach_local_node(node, from_node, membername)
|
||||
|
||||
|
||||
def build_module(name, doc=None):
|
||||
"""create and initialize a astroid Module node"""
|
||||
node = Module(name, doc, pure_python=False)
|
||||
node.package = False
|
||||
node.parent = None
|
||||
return node
|
||||
|
||||
def build_class(name, basenames=(), doc=None):
|
||||
"""create and initialize a astroid Class node"""
|
||||
node = Class(name, doc)
|
||||
for base in basenames:
|
||||
basenode = Name()
|
||||
basenode.name = base
|
||||
node.bases.append(basenode)
|
||||
basenode.parent = node
|
||||
return node
|
||||
|
||||
def build_function(name, args=None, defaults=None, flag=0, doc=None):
|
||||
"""create and initialize a astroid Function node"""
|
||||
args, defaults = args or [], defaults or []
|
||||
# first argument is now a list of decorators
|
||||
func = Function(name, doc)
|
||||
func.args = argsnode = Arguments()
|
||||
argsnode.args = []
|
||||
for arg in args:
|
||||
argsnode.args.append(Name())
|
||||
argsnode.args[-1].name = arg
|
||||
argsnode.args[-1].parent = argsnode
|
||||
argsnode.defaults = []
|
||||
for default in defaults:
|
||||
argsnode.defaults.append(const_factory(default))
|
||||
argsnode.defaults[-1].parent = argsnode
|
||||
argsnode.kwarg = None
|
||||
argsnode.vararg = None
|
||||
argsnode.parent = func
|
||||
if args:
|
||||
register_arguments(func)
|
||||
return func
|
||||
|
||||
|
||||
def build_from_import(fromname, names):
|
||||
"""create and initialize an astroid From import statement"""
|
||||
return From(fromname, [(name, None) for name in names])
|
||||
|
||||
def register_arguments(func, args=None):
|
||||
"""add given arguments to local
|
||||
|
||||
args is a list that may contains nested lists
|
||||
(i.e. def func(a, (b, c, d)): ...)
|
||||
"""
|
||||
if args is None:
|
||||
args = func.args.args
|
||||
if func.args.vararg:
|
||||
func.set_local(func.args.vararg, func.args)
|
||||
if func.args.kwarg:
|
||||
func.set_local(func.args.kwarg, func.args)
|
||||
for arg in args:
|
||||
if isinstance(arg, Name):
|
||||
func.set_local(arg.name, arg)
|
||||
else:
|
||||
register_arguments(func, arg.elts)
|
||||
|
||||
def object_build_class(node, member, localname):
|
||||
"""create astroid for a living class object"""
|
||||
basenames = [base.__name__ for base in member.__bases__]
|
||||
return _base_class_object_build(node, member, basenames,
|
||||
localname=localname)
|
||||
|
||||
def object_build_function(node, member, localname):
|
||||
"""create astroid for a living function object"""
|
||||
args, varargs, varkw, defaults = getargspec(member)
|
||||
if varargs is not None:
|
||||
args.append(varargs)
|
||||
if varkw is not None:
|
||||
args.append(varkw)
|
||||
func = build_function(getattr(member, '__name__', None) or localname, args,
|
||||
defaults, six.get_function_code(member).co_flags, member.__doc__)
|
||||
node.add_local_node(func, localname)
|
||||
|
||||
def object_build_datadescriptor(node, member, name):
|
||||
"""create astroid for a living data descriptor object"""
|
||||
return _base_class_object_build(node, member, [], name)
|
||||
|
||||
def object_build_methoddescriptor(node, member, localname):
|
||||
"""create astroid for a living method descriptor object"""
|
||||
# FIXME get arguments ?
|
||||
func = build_function(getattr(member, '__name__', None) or localname,
|
||||
doc=member.__doc__)
|
||||
# set node's arguments to None to notice that we have no information, not
|
||||
# and empty argument list
|
||||
func.args.args = None
|
||||
node.add_local_node(func, localname)
|
||||
|
||||
def _base_class_object_build(node, member, basenames, name=None, localname=None):
|
||||
"""create astroid for a living class object, with a given set of base names
|
||||
(e.g. ancestors)
|
||||
"""
|
||||
klass = build_class(name or getattr(member, '__name__', None) or localname,
|
||||
basenames, member.__doc__)
|
||||
klass._newstyle = isinstance(member, type)
|
||||
node.add_local_node(klass, localname)
|
||||
try:
|
||||
# limit the instantiation trick since it's too dangerous
|
||||
# (such as infinite test execution...)
|
||||
# this at least resolves common case such as Exception.args,
|
||||
# OSError.errno
|
||||
if issubclass(member, Exception):
|
||||
instdict = member().__dict__
|
||||
else:
|
||||
raise TypeError
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
for name, obj in instdict.items():
|
||||
valnode = EmptyNode()
|
||||
valnode.object = obj
|
||||
valnode.parent = klass
|
||||
valnode.lineno = 1
|
||||
klass.instance_attrs[name] = [valnode]
|
||||
return klass
|
||||
|
||||
|
||||
|
||||
|
||||
class InspectBuilder(object):
|
||||
"""class for building nodes from living object
|
||||
|
||||
this is actually a really minimal representation, including only Module,
|
||||
Function and Class nodes and some others as guessed.
|
||||
"""
|
||||
|
||||
# astroid from living objects ###############################################
|
||||
|
||||
def __init__(self):
|
||||
self._done = {}
|
||||
self._module = None
|
||||
|
||||
def inspect_build(self, module, modname=None, path=None):
|
||||
"""build astroid from a living module (i.e. using inspect)
|
||||
this is used when there is no python source code available (either
|
||||
because it's a built-in module or because the .py is not available)
|
||||
"""
|
||||
self._module = module
|
||||
if modname is None:
|
||||
modname = module.__name__
|
||||
try:
|
||||
node = build_module(modname, module.__doc__)
|
||||
except AttributeError:
|
||||
# in jython, java modules have no __doc__ (see #109562)
|
||||
node = build_module(modname)
|
||||
node.file = node.path = path and abspath(path) or path
|
||||
node.name = modname
|
||||
MANAGER.cache_module(node)
|
||||
node.package = hasattr(module, '__path__')
|
||||
self._done = {}
|
||||
self.object_build(node, module)
|
||||
return node
|
||||
|
||||
def object_build(self, node, obj):
|
||||
"""recursive method which create a partial ast from real objects
|
||||
(only function, class, and method are handled)
|
||||
"""
|
||||
if obj in self._done:
|
||||
return self._done[obj]
|
||||
self._done[obj] = node
|
||||
for name in dir(obj):
|
||||
try:
|
||||
member = getattr(obj, name)
|
||||
except AttributeError:
|
||||
# damned ExtensionClass.Base, I know you're there !
|
||||
attach_dummy_node(node, name)
|
||||
continue
|
||||
if ismethod(member):
|
||||
member = six.get_method_function(member)
|
||||
if isfunction(member):
|
||||
# verify this is not an imported function
|
||||
filename = getattr(six.get_function_code(member),
|
||||
'co_filename', None)
|
||||
if filename is None:
|
||||
assert isinstance(member, object)
|
||||
object_build_methoddescriptor(node, member, name)
|
||||
elif filename != getattr(self._module, '__file__', None):
|
||||
attach_dummy_node(node, name, member)
|
||||
else:
|
||||
object_build_function(node, member, name)
|
||||
elif isbuiltin(member):
|
||||
if (not _io_discrepancy(member) and
|
||||
self.imported_member(node, member, name)):
|
||||
continue
|
||||
object_build_methoddescriptor(node, member, name)
|
||||
elif isclass(member):
|
||||
if self.imported_member(node, member, name):
|
||||
continue
|
||||
if member in self._done:
|
||||
class_node = self._done[member]
|
||||
if not class_node in node.locals.get(name, ()):
|
||||
node.add_local_node(class_node, name)
|
||||
else:
|
||||
class_node = object_build_class(node, member, name)
|
||||
# recursion
|
||||
self.object_build(class_node, member)
|
||||
if name == '__class__' and class_node.parent is None:
|
||||
class_node.parent = self._done[self._module]
|
||||
elif ismethoddescriptor(member):
|
||||
assert isinstance(member, object)
|
||||
object_build_methoddescriptor(node, member, name)
|
||||
elif isdatadescriptor(member):
|
||||
assert isinstance(member, object)
|
||||
object_build_datadescriptor(node, member, name)
|
||||
elif type(member) in _CONSTANTS:
|
||||
attach_const_node(node, name, member)
|
||||
else:
|
||||
# create an empty node so that the name is actually defined
|
||||
attach_dummy_node(node, name, member)
|
||||
|
||||
def imported_member(self, node, member, name):
|
||||
"""verify this is not an imported class or handle it"""
|
||||
# /!\ some classes like ExtensionClass doesn't have a __module__
|
||||
# attribute ! Also, this may trigger an exception on badly built module
|
||||
# (see http://www.logilab.org/ticket/57299 for instance)
|
||||
try:
|
||||
modname = getattr(member, '__module__', None)
|
||||
except:
|
||||
# XXX use logging
|
||||
print('unexpected error while building astroid from living object')
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
modname = None
|
||||
if modname is None:
|
||||
if name in ('__new__', '__subclasshook__'):
|
||||
# Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14)
|
||||
# >>> print object.__new__.__module__
|
||||
# None
|
||||
modname = BUILTINS
|
||||
else:
|
||||
attach_dummy_node(node, name, member)
|
||||
return True
|
||||
if {'gtk': 'gtk._gtk'}.get(modname, modname) != self._module.__name__:
|
||||
# check if it sounds valid and then add an import node, else use a
|
||||
# dummy node
|
||||
try:
|
||||
getattr(sys.modules[modname], name)
|
||||
except (KeyError, AttributeError):
|
||||
attach_dummy_node(node, name, member)
|
||||
else:
|
||||
attach_import_node(node, modname, name)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
### astroid bootstrapping ######################################################
|
||||
Astroid_BUILDER = InspectBuilder()
|
||||
|
||||
_CONST_PROXY = {}
|
||||
def _astroid_bootstrapping(astroid_builtin=None):
|
||||
"""astroid boot strapping the builtins module"""
|
||||
# this boot strapping is necessary since we need the Const nodes to
|
||||
# inspect_build builtins, and then we can proxy Const
|
||||
if astroid_builtin is None:
|
||||
from logilab.common.compat import builtins
|
||||
astroid_builtin = Astroid_BUILDER.inspect_build(builtins)
|
||||
|
||||
for cls, node_cls in CONST_CLS.items():
|
||||
if cls is type(None):
|
||||
proxy = build_class('NoneType')
|
||||
proxy.parent = astroid_builtin
|
||||
else:
|
||||
proxy = astroid_builtin.getattr(cls.__name__)[0]
|
||||
if cls in (dict, list, set, tuple):
|
||||
node_cls._proxied = proxy
|
||||
else:
|
||||
_CONST_PROXY[cls] = proxy
|
||||
|
||||
_astroid_bootstrapping()
|
||||
|
||||
# TODO : find a nicer way to handle this situation;
|
||||
# However __proxied introduced an
|
||||
# infinite recursion (see https://bugs.launchpad.net/pylint/+bug/456870)
|
||||
def _set_proxied(const):
|
||||
return _CONST_PROXY[const.value.__class__]
|
||||
Const._proxied = property(_set_proxied)
|
||||
|
||||
from types import GeneratorType
|
||||
Generator._proxied = Class(GeneratorType.__name__, GeneratorType.__doc__)
|
||||
Astroid_BUILDER.object_build(Generator._proxied, GeneratorType)
|
||||
|
||||
|
|
@ -1,926 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains utilities for rebuilding a _ast tree in
|
||||
order to get a single Astroid representation
|
||||
"""
|
||||
|
||||
import sys
|
||||
from _ast import (
|
||||
Expr as Discard, Str,
|
||||
# binary operators
|
||||
Add, BinOp, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor,
|
||||
LShift, RShift,
|
||||
# logical operators
|
||||
And, Or,
|
||||
# unary operators
|
||||
UAdd, USub, Not, Invert,
|
||||
# comparison operators
|
||||
Eq, Gt, GtE, In, Is, IsNot, Lt, LtE, NotEq, NotIn,
|
||||
)
|
||||
|
||||
from astroid import nodes as new
|
||||
from astroid import astpeephole
|
||||
|
||||
|
||||
_BIN_OP_CLASSES = {Add: '+',
|
||||
BitAnd: '&',
|
||||
BitOr: '|',
|
||||
BitXor: '^',
|
||||
Div: '/',
|
||||
FloorDiv: '//',
|
||||
Mod: '%',
|
||||
Mult: '*',
|
||||
Pow: '**',
|
||||
Sub: '-',
|
||||
LShift: '<<',
|
||||
RShift: '>>',
|
||||
}
|
||||
|
||||
_BOOL_OP_CLASSES = {And: 'and',
|
||||
Or: 'or',
|
||||
}
|
||||
|
||||
_UNARY_OP_CLASSES = {UAdd: '+',
|
||||
USub: '-',
|
||||
Not: 'not',
|
||||
Invert: '~',
|
||||
}
|
||||
|
||||
_CMP_OP_CLASSES = {Eq: '==',
|
||||
Gt: '>',
|
||||
GtE: '>=',
|
||||
In: 'in',
|
||||
Is: 'is',
|
||||
IsNot: 'is not',
|
||||
Lt: '<',
|
||||
LtE: '<=',
|
||||
NotEq: '!=',
|
||||
NotIn: 'not in',
|
||||
}
|
||||
|
||||
CONST_NAME_TRANSFORMS = {'None': None,
|
||||
'True': True,
|
||||
'False': False,
|
||||
}
|
||||
|
||||
REDIRECT = {'arguments': 'Arguments',
|
||||
'Attribute': 'Getattr',
|
||||
'comprehension': 'Comprehension',
|
||||
'Call': 'CallFunc',
|
||||
'ClassDef': 'Class',
|
||||
"ListCompFor": 'Comprehension',
|
||||
"GenExprFor": 'Comprehension',
|
||||
'excepthandler': 'ExceptHandler',
|
||||
'Expr': 'Discard',
|
||||
'FunctionDef': 'Function',
|
||||
'GeneratorExp': 'GenExpr',
|
||||
'ImportFrom': 'From',
|
||||
'keyword': 'Keyword',
|
||||
'Repr': 'Backquote',
|
||||
}
|
||||
PY3K = sys.version_info >= (3, 0)
|
||||
PY34 = sys.version_info >= (3, 4)
|
||||
|
||||
def _init_set_doc(node, newnode):
|
||||
newnode.doc = None
|
||||
try:
|
||||
if isinstance(node.body[0], Discard) and isinstance(node.body[0].value, Str):
|
||||
newnode.doc = node.body[0].value.s
|
||||
node.body = node.body[1:]
|
||||
|
||||
except IndexError:
|
||||
pass # ast built from scratch
|
||||
|
||||
def _lineno_parent(oldnode, newnode, parent):
|
||||
newnode.parent = parent
|
||||
newnode.lineno = oldnode.lineno
|
||||
newnode.col_offset = oldnode.col_offset
|
||||
|
||||
def _set_infos(oldnode, newnode, parent):
|
||||
newnode.parent = parent
|
||||
if hasattr(oldnode, 'lineno'):
|
||||
newnode.lineno = oldnode.lineno
|
||||
if hasattr(oldnode, 'col_offset'):
|
||||
newnode.col_offset = oldnode.col_offset
|
||||
|
||||
def _create_yield_node(node, parent, rebuilder, factory):
|
||||
newnode = factory()
|
||||
_lineno_parent(node, newnode, parent)
|
||||
if node.value is not None:
|
||||
newnode.value = rebuilder.visit(node.value, newnode)
|
||||
return newnode
|
||||
|
||||
|
||||
class TreeRebuilder(object):
    """Rebuilds the _ast tree to become an Astroid tree"""

    def __init__(self, manager):
        self._manager = manager
        # Assignment context flag: set to "Ass" or "Del" around the visit of
        # assignment/deletion targets, None otherwise (see visit_assign,
        # visit_delete, visit_name, visit_getattr).
        self.asscontext = None
        # Stack of dicts, one pushed per function scope (visit_function);
        # consulted by _save_assignment and filled by visit_global.
        self._global_names = []
        # From nodes recorded here during the build; presumably resolved into
        # locals after building completes (outside this view) — TODO confirm.
        self._from_nodes = []
        # AssAttr nodes whose attribute assignment is resolved later
        # (appended in visit_assattr / visit_getattr) — processed elsewhere.
        self._delayed_assattr = []
        # Memo cache: raw ast class -> bound visit_* method (see visit()).
        self._visit_meths = {}
        self._transform = manager.transform
        self._peepholer = astpeephole.ASTPeepholeOptimizer()

    def visit_module(self, node, modname, modpath, package):
        """visit a Module node by returning a fresh instance of it"""
        newnode = new.Module(modname, None)
        newnode.package = package
        newnode.parent = None
        _init_set_doc(node, newnode)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        newnode.file = newnode.path = modpath
        return self._transform(newnode)

    def visit(self, node, parent):
        """Dispatch to the visit_* method matching the raw node's class.

        The method is resolved through the REDIRECT mapping, cached per
        class in self._visit_meths, and its result is passed through the
        manager's transform hook.
        """
        cls = node.__class__
        if cls in self._visit_meths:
            visit_method = self._visit_meths[cls]
        else:
            cls_name = cls.__name__
            visit_name = 'visit_' + REDIRECT.get(cls_name, cls_name).lower()
            visit_method = getattr(self, visit_name)
            self._visit_meths[cls] = visit_method
        return self._transform(visit_method(node, parent))

    def _save_assignment(self, node, name=None):
        """save assignement situation since node.parent is not available yet"""
        if self._global_names and node.name in self._global_names[-1]:
            # the name was declared global in the current scope: record it
            # on the module (root) frame instead of the local one
            node.root().set_local(node.name, node)
        else:
            node.parent.set_local(node.name, node)

    def visit_arguments(self, node, parent):
        """visit a Arguments node by returning a fresh instance of it"""
        newnode = new.Arguments()
        newnode.parent = parent
        self.asscontext = "Ass"
        newnode.args = [self.visit(child, newnode) for child in node.args]
        self.asscontext = None
        newnode.defaults = [self.visit(child, newnode) for child in node.defaults]
        newnode.kwonlyargs = []
        newnode.kw_defaults = []
        vararg, kwarg = node.vararg, node.kwarg
        # change added in 82732 (7c5c678e4164), vararg and kwarg
        # are instances of `_ast.arg`, not strings
        if vararg:
            if PY34:
                if vararg.annotation:
                    newnode.varargannotation = self.visit(vararg.annotation,
                                                          newnode)
                vararg = vararg.arg
            elif PY3K and node.varargannotation:
                newnode.varargannotation = self.visit(node.varargannotation,
                                                      newnode)
        if kwarg:
            if PY34:
                if kwarg.annotation:
                    newnode.kwargannotation = self.visit(kwarg.annotation,
                                                         newnode)
                kwarg = kwarg.arg
            elif PY3K:
                if node.kwargannotation:
                    newnode.kwargannotation = self.visit(node.kwargannotation,
                                                         newnode)
        newnode.vararg = vararg
        newnode.kwarg = kwarg
        # save argument names in locals:
        if vararg:
            newnode.parent.set_local(vararg, newnode)
        if kwarg:
            newnode.parent.set_local(kwarg, newnode)
        return newnode

    def visit_assattr(self, node, parent):
        """visit a AssAttr node by returning a fresh instance of it"""
        # suspend the assignment context while visiting the attribute's
        # expression, then restore it
        assc, self.asscontext = self.asscontext, None
        newnode = new.AssAttr()
        _lineno_parent(node, newnode, parent)
        newnode.expr = self.visit(node.expr, newnode)
        self.asscontext = assc
        self._delayed_assattr.append(newnode)
        return newnode

    def visit_assert(self, node, parent):
        """visit a Assert node by returning a fresh instance of it"""
        newnode = new.Assert()
        _lineno_parent(node, newnode, parent)
        newnode.test = self.visit(node.test, newnode)
        if node.msg is not None:
            newnode.fail = self.visit(node.msg, newnode)
        return newnode

    def visit_assign(self, node, parent):
        """visit a Assign node by returning a fresh instance of it"""
        newnode = new.Assign()
        _lineno_parent(node, newnode, parent)
        self.asscontext = "Ass"
        newnode.targets = [self.visit(child, newnode) for child in node.targets]
        self.asscontext = None
        newnode.value = self.visit(node.value, newnode)
        # set some function or metaclass infos XXX explain ?
        klass = newnode.parent.frame()
        if (isinstance(klass, new.Class)
                and isinstance(newnode.value, new.CallFunc)
                and isinstance(newnode.value.func, new.Name)):
            func_name = newnode.value.func.name
            for ass_node in newnode.targets:
                try:
                    meth = klass[ass_node.name]
                    if isinstance(meth, new.Function):
                        if func_name in ('classmethod', 'staticmethod'):
                            meth.type = func_name
                        elif func_name == 'classproperty':  # see lgc.decorators
                            meth.type = 'classmethod'
                        meth.extra_decorators.append(newnode.value)
                except (AttributeError, KeyError):
                    continue
        return newnode

    def visit_assname(self, node, parent, node_name=None):
        '''visit a node and return a AssName node'''
        newnode = new.AssName()
        _set_infos(node, newnode, parent)
        newnode.name = node_name
        self._save_assignment(newnode)
        return newnode

    def visit_augassign(self, node, parent):
        """visit a AugAssign node by returning a fresh instance of it"""
        newnode = new.AugAssign()
        _lineno_parent(node, newnode, parent)
        newnode.op = _BIN_OP_CLASSES[node.op.__class__] + "="
        self.asscontext = "Ass"
        newnode.target = self.visit(node.target, newnode)
        self.asscontext = None
        newnode.value = self.visit(node.value, newnode)
        return newnode

    def visit_backquote(self, node, parent):
        """visit a Backquote node by returning a fresh instance of it"""
        newnode = new.Backquote()
        _lineno_parent(node, newnode, parent)
        newnode.value = self.visit(node.value, newnode)
        return newnode

    def visit_binop(self, node, parent):
        """visit a BinOp node by returning a fresh instance of it"""
        if isinstance(node.left, BinOp) and self._manager.optimize_ast:
            # Optimize BinOp operations in order to remove
            # redundant recursion. For instance, if the
            # following code is parsed in order to obtain
            # its ast, then the rebuilder will fail with an
            # infinite recursion, the same will happen with the
            # inference engine as well. There's no need to hold
            # so many objects for the BinOp if they can be reduced
            # to something else (also, the optimization
            # might handle only Const binops, which isn't a big
            # problem for the correctness of the program).
            #
            # ("a" + "b" + # one thousand more + "c")
            newnode = self._peepholer.optimize_binop(node)
            if newnode:
                _lineno_parent(node, newnode, parent)
                return newnode

        newnode = new.BinOp()
        _lineno_parent(node, newnode, parent)
        newnode.left = self.visit(node.left, newnode)
        newnode.right = self.visit(node.right, newnode)
        newnode.op = _BIN_OP_CLASSES[node.op.__class__]
        return newnode

    def visit_boolop(self, node, parent):
        """visit a BoolOp node by returning a fresh instance of it"""
        newnode = new.BoolOp()
        _lineno_parent(node, newnode, parent)
        newnode.values = [self.visit(child, newnode) for child in node.values]
        newnode.op = _BOOL_OP_CLASSES[node.op.__class__]
        return newnode

    def visit_break(self, node, parent):
        """visit a Break node by returning a fresh instance of it"""
        newnode = new.Break()
        _set_infos(node, newnode, parent)
        return newnode

    def visit_callfunc(self, node, parent):
        """visit a CallFunc node by returning a fresh instance of it"""
        newnode = new.CallFunc()
        _lineno_parent(node, newnode, parent)
        newnode.func = self.visit(node.func, newnode)
        newnode.args = [self.visit(child, newnode) for child in node.args]
        if node.starargs is not None:
            newnode.starargs = self.visit(node.starargs, newnode)
        if node.kwargs is not None:
            newnode.kwargs = self.visit(node.kwargs, newnode)
        # keyword arguments are appended after positional ones
        for child in node.keywords:
            newnode.args.append(self.visit(child, newnode))
        return newnode

    def visit_class(self, node, parent):
        """visit a Class node to become astroid"""
        newnode = new.Class(node.name, None)
        _lineno_parent(node, newnode, parent)
        _init_set_doc(node, newnode)
        newnode.bases = [self.visit(child, newnode) for child in node.bases]
        newnode.body = [self.visit(child, newnode) for child in node.body]
        if 'decorator_list' in node._fields and node.decorator_list:  # py >= 2.6
            newnode.decorators = self.visit_decorators(node, newnode)
        newnode.parent.frame().set_local(newnode.name, newnode)
        return newnode

    def visit_const(self, node, parent):
        """visit a Const node by returning a fresh instance of it"""
        newnode = new.Const(node.value)
        _set_infos(node, newnode, parent)
        return newnode

    def visit_continue(self, node, parent):
        """visit a Continue node by returning a fresh instance of it"""
        newnode = new.Continue()
        _set_infos(node, newnode, parent)
        return newnode

    def visit_compare(self, node, parent):
        """visit a Compare node by returning a fresh instance of it"""
        newnode = new.Compare()
        _lineno_parent(node, newnode, parent)
        newnode.left = self.visit(node.left, newnode)
        newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode))
                       for (op, expr) in zip(node.ops, node.comparators)]
        return newnode

    def visit_comprehension(self, node, parent):
        """visit a Comprehension node by returning a fresh instance of it"""
        newnode = new.Comprehension()
        newnode.parent = parent
        self.asscontext = "Ass"
        newnode.target = self.visit(node.target, newnode)
        self.asscontext = None
        newnode.iter = self.visit(node.iter, newnode)
        newnode.ifs = [self.visit(child, newnode) for child in node.ifs]
        return newnode

    def visit_decorators(self, node, parent):
        """visit a Decorators node by returning a fresh instance of it"""
        # /!\ node is actually a _ast.Function node while
        # parent is a astroid.nodes.Function node
        newnode = new.Decorators()
        _lineno_parent(node, newnode, parent)
        if 'decorators' in node._fields:  # py < 2.6, i.e. 2.5
            decorators = node.decorators
        else:
            decorators = node.decorator_list
        newnode.nodes = [self.visit(child, newnode) for child in decorators]
        return newnode

    def visit_delete(self, node, parent):
        """visit a Delete node by returning a fresh instance of it"""
        newnode = new.Delete()
        _lineno_parent(node, newnode, parent)
        self.asscontext = "Del"
        newnode.targets = [self.visit(child, newnode) for child in node.targets]
        self.asscontext = None
        return newnode

    def visit_dict(self, node, parent):
        """visit a Dict node by returning a fresh instance of it"""
        newnode = new.Dict()
        _lineno_parent(node, newnode, parent)
        newnode.items = [(self.visit(key, newnode), self.visit(value, newnode))
                         for key, value in zip(node.keys, node.values)]
        return newnode

    def visit_dictcomp(self, node, parent):
        """visit a DictComp node by returning a fresh instance of it"""
        newnode = new.DictComp()
        _lineno_parent(node, newnode, parent)
        newnode.key = self.visit(node.key, newnode)
        newnode.value = self.visit(node.value, newnode)
        newnode.generators = [self.visit(child, newnode)
                              for child in node.generators]
        return newnode

    def visit_discard(self, node, parent):
        """visit a Discard node by returning a fresh instance of it"""
        newnode = new.Discard()
        _lineno_parent(node, newnode, parent)
        newnode.value = self.visit(node.value, newnode)
        return newnode

    def visit_ellipsis(self, node, parent):
        """visit an Ellipsis node by returning a fresh instance of it"""
        newnode = new.Ellipsis()
        _set_infos(node, newnode, parent)
        return newnode

    def visit_emptynode(self, node, parent):
        """visit an EmptyNode node by returning a fresh instance of it"""
        newnode = new.EmptyNode()
        _set_infos(node, newnode, parent)
        return newnode

    def visit_excepthandler(self, node, parent):
        """visit an ExceptHandler node by returning a fresh instance of it"""
        newnode = new.ExceptHandler()
        _lineno_parent(node, newnode, parent)
        if node.type is not None:
            newnode.type = self.visit(node.type, newnode)
        if node.name is not None:
            # /!\ node.name can be a tuple
            self.asscontext = "Ass"
            newnode.name = self.visit(node.name, newnode)
            self.asscontext = None
        newnode.body = [self.visit(child, newnode) for child in node.body]
        return newnode

    def visit_exec(self, node, parent):
        """visit an Exec node by returning a fresh instance of it"""
        newnode = new.Exec()
        _lineno_parent(node, newnode, parent)
        newnode.expr = self.visit(node.body, newnode)
        if node.globals is not None:
            newnode.globals = self.visit(node.globals, newnode)
        if node.locals is not None:
            newnode.locals = self.visit(node.locals, newnode)
        return newnode

    def visit_extslice(self, node, parent):
        """visit an ExtSlice node by returning a fresh instance of it"""
        newnode = new.ExtSlice()
        newnode.parent = parent
        newnode.dims = [self.visit(dim, newnode) for dim in node.dims]
        return newnode

    def visit_for(self, node, parent):
        """visit a For node by returning a fresh instance of it"""
        newnode = new.For()
        _lineno_parent(node, newnode, parent)
        self.asscontext = "Ass"
        newnode.target = self.visit(node.target, newnode)
        self.asscontext = None
        newnode.iter = self.visit(node.iter, newnode)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
        return newnode

    def visit_from(self, node, parent):
        """visit a From node by returning a fresh instance of it"""
        names = [(alias.name, alias.asname) for alias in node.names]
        newnode = new.From(node.module or '', names, node.level or None)
        _set_infos(node, newnode, parent)
        # store From names to add them to locals after building
        self._from_nodes.append(newnode)
        return newnode

    def visit_function(self, node, parent):
        """visit an Function node to become astroid"""
        self._global_names.append({})
        newnode = new.Function(node.name, None)
        _lineno_parent(node, newnode, parent)
        _init_set_doc(node, newnode)
        newnode.args = self.visit(node.args, newnode)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        if 'decorators' in node._fields:  # py < 2.6
            attr = 'decorators'
        else:
            attr = 'decorator_list'
        decorators = getattr(node, attr)
        if decorators:
            newnode.decorators = self.visit_decorators(node, newnode)
        if PY3K and node.returns:
            newnode.returns = self.visit(node.returns, newnode)
        self._global_names.pop()
        frame = newnode.parent.frame()
        if isinstance(frame, new.Class):
            if newnode.name == '__new__':
                newnode._type = 'classmethod'
            else:
                newnode._type = 'method'
        if newnode.decorators is not None:
            for decorator_expr in newnode.decorators.nodes:
                if isinstance(decorator_expr, new.Name):
                    if decorator_expr.name in ('classmethod', 'staticmethod'):
                        newnode._type = decorator_expr.name
                    elif decorator_expr.name == 'classproperty':
                        newnode._type = 'classmethod'
        frame.set_local(newnode.name, newnode)
        return newnode

    def visit_genexpr(self, node, parent):
        """visit a GenExpr node by returning a fresh instance of it"""
        newnode = new.GenExpr()
        _lineno_parent(node, newnode, parent)
        newnode.elt = self.visit(node.elt, newnode)
        newnode.generators = [self.visit(child, newnode) for child in node.generators]
        return newnode

    def visit_getattr(self, node, parent):
        """visit a Getattr node by returning a fresh instance of it"""
        if self.asscontext == "Del":
            # FIXME : maybe we should reintroduce and visit_delattr ?
            # for instance, deactivating asscontext
            newnode = new.DelAttr()
        elif self.asscontext == "Ass":
            # FIXME : maybe we should call visit_assattr ?
            newnode = new.AssAttr()
            self._delayed_assattr.append(newnode)
        else:
            newnode = new.Getattr()
        _lineno_parent(node, newnode, parent)
        asscontext, self.asscontext = self.asscontext, None
        newnode.expr = self.visit(node.value, newnode)
        self.asscontext = asscontext
        newnode.attrname = node.attr
        return newnode

    def visit_global(self, node, parent):
        """visit an Global node to become astroid"""
        newnode = new.Global(node.names)
        _set_infos(node, newnode, parent)
        if self._global_names:  # global at the module level, no effect
            for name in node.names:
                self._global_names[-1].setdefault(name, []).append(newnode)
        return newnode

    def visit_if(self, node, parent):
        """visit a If node by returning a fresh instance of it"""
        newnode = new.If()
        _lineno_parent(node, newnode, parent)
        newnode.test = self.visit(node.test, newnode)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
        return newnode

    def visit_ifexp(self, node, parent):
        """visit a IfExp node by returning a fresh instance of it"""
        newnode = new.IfExp()
        _lineno_parent(node, newnode, parent)
        newnode.test = self.visit(node.test, newnode)
        newnode.body = self.visit(node.body, newnode)
        newnode.orelse = self.visit(node.orelse, newnode)
        return newnode

    def visit_import(self, node, parent):
        """visit a Import node by returning a fresh instance of it"""
        newnode = new.Import()
        _set_infos(node, newnode, parent)
        newnode.names = [(alias.name, alias.asname) for alias in node.names]
        # save import names in parent's locals:
        for (name, asname) in newnode.names:
            name = asname or name
            newnode.parent.set_local(name.split('.')[0], newnode)
        return newnode

    def visit_index(self, node, parent):
        """visit a Index node by returning a fresh instance of it"""
        newnode = new.Index()
        newnode.parent = parent
        newnode.value = self.visit(node.value, newnode)
        return newnode

    def visit_keyword(self, node, parent):
        """visit a Keyword node by returning a fresh instance of it"""
        newnode = new.Keyword()
        newnode.parent = parent
        newnode.arg = node.arg
        newnode.value = self.visit(node.value, newnode)
        return newnode

    def visit_lambda(self, node, parent):
        """visit a Lambda node by returning a fresh instance of it"""
        newnode = new.Lambda()
        _lineno_parent(node, newnode, parent)
        newnode.args = self.visit(node.args, newnode)
        newnode.body = self.visit(node.body, newnode)
        return newnode

    def visit_list(self, node, parent):
        """visit a List node by returning a fresh instance of it"""
        newnode = new.List()
        _lineno_parent(node, newnode, parent)
        newnode.elts = [self.visit(child, newnode) for child in node.elts]
        return newnode

    def visit_listcomp(self, node, parent):
        """visit a ListComp node by returning a fresh instance of it"""
        newnode = new.ListComp()
        _lineno_parent(node, newnode, parent)
        newnode.elt = self.visit(node.elt, newnode)
        newnode.generators = [self.visit(child, newnode)
                              for child in node.generators]
        return newnode

    def visit_name(self, node, parent):
        """visit a Name node by returning a fresh instance of it"""
        # True and False can be assigned to something in py2x, so we have to
        # check first the asscontext
        if self.asscontext == "Del":
            newnode = new.DelName()
        elif self.asscontext is not None:  # Ass
            assert self.asscontext == "Ass"
            newnode = new.AssName()
        elif node.id in CONST_NAME_TRANSFORMS:
            newnode = new.Const(CONST_NAME_TRANSFORMS[node.id])
            _set_infos(node, newnode, parent)
            return newnode
        else:
            newnode = new.Name()
        _lineno_parent(node, newnode, parent)
        newnode.name = node.id
        # XXX REMOVE me :
        if self.asscontext in ('Del', 'Ass'):  # 'Aug' ??
            self._save_assignment(newnode)
        return newnode

    def visit_bytes(self, node, parent):
        """visit a Bytes node by returning a fresh instance of Const"""
        newnode = new.Const(node.s)
        _set_infos(node, newnode, parent)
        return newnode

    def visit_num(self, node, parent):
        """visit a Num node by returning a fresh instance of Const"""
        newnode = new.Const(node.n)
        _set_infos(node, newnode, parent)
        return newnode

    def visit_pass(self, node, parent):
        """visit a Pass node by returning a fresh instance of it"""
        newnode = new.Pass()
        _set_infos(node, newnode, parent)
        return newnode

    def visit_str(self, node, parent):
        """visit a Str node by returning a fresh instance of Const"""
        newnode = new.Const(node.s)
        _set_infos(node, newnode, parent)
        return newnode

    def visit_print(self, node, parent):
        """visit a Print node by returning a fresh instance of it"""
        newnode = new.Print()
        _lineno_parent(node, newnode, parent)
        newnode.nl = node.nl
        if node.dest is not None:
            newnode.dest = self.visit(node.dest, newnode)
        newnode.values = [self.visit(child, newnode) for child in node.values]
        return newnode

    def visit_raise(self, node, parent):
        """visit a Raise node by returning a fresh instance of it"""
        newnode = new.Raise()
        _lineno_parent(node, newnode, parent)
        if node.type is not None:
            newnode.exc = self.visit(node.type, newnode)
        if node.inst is not None:
            newnode.inst = self.visit(node.inst, newnode)
        if node.tback is not None:
            newnode.tback = self.visit(node.tback, newnode)
        return newnode

    def visit_return(self, node, parent):
        """visit a Return node by returning a fresh instance of it"""
        newnode = new.Return()
        _lineno_parent(node, newnode, parent)
        if node.value is not None:
            newnode.value = self.visit(node.value, newnode)
        return newnode

    def visit_set(self, node, parent):
        """visit a Set node by returning a fresh instance of it"""
        newnode = new.Set()
        _lineno_parent(node, newnode, parent)
        newnode.elts = [self.visit(child, newnode) for child in node.elts]
        return newnode

    def visit_setcomp(self, node, parent):
        """visit a SetComp node by returning a fresh instance of it"""
        newnode = new.SetComp()
        _lineno_parent(node, newnode, parent)
        newnode.elt = self.visit(node.elt, newnode)
        newnode.generators = [self.visit(child, newnode)
                              for child in node.generators]
        return newnode

    def visit_slice(self, node, parent):
        """visit a Slice node by returning a fresh instance of it"""
        newnode = new.Slice()
        newnode.parent = parent
        if node.lower is not None:
            newnode.lower = self.visit(node.lower, newnode)
        if node.upper is not None:
            newnode.upper = self.visit(node.upper, newnode)
        if node.step is not None:
            newnode.step = self.visit(node.step, newnode)
        return newnode

    def visit_subscript(self, node, parent):
        """visit a Subscript node by returning a fresh instance of it"""
        newnode = new.Subscript()
        _lineno_parent(node, newnode, parent)
        # the subscripted value and slice are visited outside any
        # assignment context
        subcontext, self.asscontext = self.asscontext, None
        newnode.value = self.visit(node.value, newnode)
        newnode.slice = self.visit(node.slice, newnode)
        self.asscontext = subcontext
        return newnode

    def visit_tryexcept(self, node, parent):
        """visit a TryExcept node by returning a fresh instance of it"""
        newnode = new.TryExcept()
        _lineno_parent(node, newnode, parent)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        newnode.handlers = [self.visit(child, newnode) for child in node.handlers]
        newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
        return newnode

    def visit_tryfinally(self, node, parent):
        """visit a TryFinally node by returning a fresh instance of it"""
        newnode = new.TryFinally()
        _lineno_parent(node, newnode, parent)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody]
        return newnode

    def visit_tuple(self, node, parent):
        """visit a Tuple node by returning a fresh instance of it"""
        newnode = new.Tuple()
        _lineno_parent(node, newnode, parent)
        newnode.elts = [self.visit(child, newnode) for child in node.elts]
        return newnode

    def visit_unaryop(self, node, parent):
        """visit a UnaryOp node by returning a fresh instance of it"""
        newnode = new.UnaryOp()
        _lineno_parent(node, newnode, parent)
        newnode.operand = self.visit(node.operand, newnode)
        newnode.op = _UNARY_OP_CLASSES[node.op.__class__]
        return newnode

    def visit_while(self, node, parent):
        """visit a While node by returning a fresh instance of it"""
        newnode = new.While()
        _lineno_parent(node, newnode, parent)
        newnode.test = self.visit(node.test, newnode)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
        return newnode

    def visit_with(self, node, parent):
        """visit a With node by returning a fresh instance of it (py < 3.3,
        single context manager: items holds one (expr, vars) pair)"""
        newnode = new.With()
        _lineno_parent(node, newnode, parent)
        expr = self.visit(node.context_expr, newnode)
        self.asscontext = "Ass"
        if node.optional_vars is not None:
            vars = self.visit(node.optional_vars, newnode)
        else:
            vars = None
        self.asscontext = None
        newnode.items = [(expr, vars)]
        newnode.body = [self.visit(child, newnode) for child in node.body]
        return newnode

    def visit_yield(self, node, parent):
        """visit a Yield node by returning a fresh instance of it"""
        return _create_yield_node(node, parent, self, new.Yield)
|
||||
|
||||
class TreeRebuilder3k(TreeRebuilder):
    """extend and overwrite TreeRebuilder for python3k"""

    def visit_arg(self, node, parent):
        """visit a arg node by returning a fresh AssName instance"""
        # the <arg> node is coming from py>=3.0, but we use AssName in py2.x
        # XXX or we should instead introduce a Arg node in astroid ?
        return self.visit_assname(node, parent, node.arg)

    def visit_nameconstant(self, node, parent):
        """visit a NameConstant node by returning a fresh Const instance"""
        # in Python 3.4 we have NameConstant for True / False / None
        newnode = new.Const(node.value)
        _set_infos(node, newnode, parent)
        return newnode

    def visit_arguments(self, node, parent):
        """visit a arguments node: base handling plus py3-only fields
        (keyword-only args, their defaults, and argument annotations)"""
        newnode = super(TreeRebuilder3k, self).visit_arguments(node, parent)
        self.asscontext = "Ass"
        newnode.kwonlyargs = [self.visit(child, newnode) for child in node.kwonlyargs]
        self.asscontext = None
        newnode.kw_defaults = [self.visit(child, newnode) if child else None for child in node.kw_defaults]
        newnode.annotations = [
            self.visit(arg.annotation, newnode) if arg.annotation else None
            for arg in node.args]
        return newnode

    def visit_excepthandler(self, node, parent):
        """visit an ExceptHandler node by returning a fresh instance of it"""
        newnode = new.ExceptHandler()
        _lineno_parent(node, newnode, parent)
        if node.type is not None:
            newnode.type = self.visit(node.type, newnode)
        if node.name is not None:
            # in py3 the handler name is a plain string, not a node
            newnode.name = self.visit_assname(node, newnode, node.name)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        return newnode

    def visit_nonlocal(self, node, parent):
        """visit a Nonlocal node and return a new instance of it"""
        newnode = new.Nonlocal(node.names)
        _set_infos(node, newnode, parent)
        return newnode

    def visit_raise(self, node, parent):
        """visit a Raise node by returning a fresh instance of it"""
        newnode = new.Raise()
        _lineno_parent(node, newnode, parent)
        # no traceback; anyway it is not used in Pylint
        if node.exc is not None:
            newnode.exc = self.visit(node.exc, newnode)
        if node.cause is not None:
            newnode.cause = self.visit(node.cause, newnode)
        return newnode

    def visit_starred(self, node, parent):
        """visit a Starred node and return a new instance of it"""
        newnode = new.Starred()
        _lineno_parent(node, newnode, parent)
        newnode.value = self.visit(node.value, newnode)
        return newnode

    def visit_try(self, node, parent):
        """visit a Try node, mapping it back onto TryFinally/TryExcept"""
        # python 3.3 introduce a new Try node replacing TryFinally/TryExcept nodes
        if node.finalbody:
            newnode = new.TryFinally()
            _lineno_parent(node, newnode, parent)
            newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody]
            if node.handlers:
                # try/except/finally: nest a TryExcept inside the TryFinally
                excnode = new.TryExcept()
                _lineno_parent(node, excnode, newnode)
                excnode.body = [self.visit(child, excnode) for child in node.body]
                excnode.handlers = [self.visit(child, excnode) for child in node.handlers]
                excnode.orelse = [self.visit(child, excnode) for child in node.orelse]
                newnode.body = [excnode]
            else:
                newnode.body = [self.visit(child, newnode) for child in node.body]
        elif node.handlers:
            newnode = new.TryExcept()
            _lineno_parent(node, newnode, parent)
            newnode.body = [self.visit(child, newnode) for child in node.body]
            newnode.handlers = [self.visit(child, newnode) for child in node.handlers]
            newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
        return newnode

    def visit_with(self, node, parent):
        """visit a With node (py >= 3.3: multiple items per statement)"""
        if 'items' not in node._fields:
            # python < 3.3
            return super(TreeRebuilder3k, self).visit_with(node, parent)

        newnode = new.With()
        _lineno_parent(node, newnode, parent)

        def visit_child(child):
            # each withitem becomes an (expression, optional target) pair
            expr = self.visit(child.context_expr, newnode)
            self.asscontext = 'Ass'
            if child.optional_vars:
                var = self.visit(child.optional_vars, newnode)
            else:
                var = None
            self.asscontext = None
            return expr, var
        newnode.items = [visit_child(child)
                         for child in node.items]
        newnode.body = [self.visit(child, newnode) for child in node.body]
        return newnode

    def visit_yieldfrom(self, node, parent):
        """visit a YieldFrom node by returning a fresh instance of it"""
        return _create_yield_node(node, parent, self, new.YieldFrom)

    def visit_class(self, node, parent):
        """visit a ClassDef node: base handling plus py3 metaclass keyword"""
        newnode = super(TreeRebuilder3k, self).visit_class(node, parent)
        newnode._newstyle = True
        for keyword in node.keywords:
            if keyword.arg == 'metaclass':
                newnode._metaclass = self.visit(keyword, newnode).value
                break
        return newnode
|
||||
|
||||
# On Python 3 the py3k subclass (which adds/overrides the py3-only visit
# methods) replaces the base class under the name the rest of the package
# imports.
if sys.version_info >= (3, 0):
    TreeRebuilder = TreeRebuilder3k
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load diff
|
|
@ -1,218 +0,0 @@
|
|||
"""Utility functions for test code that uses astroid ASTs as input."""
|
||||
import functools
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
from astroid import nodes
|
||||
from astroid import builder
|
||||
# The name of the transient function that is used to
# wrap expressions to be extracted when calling
# extract_node.
_TRANSIENT_FUNCTION = '__'

# The comment used to select a statement to be extracted
# when calling extract_node.
_STATEMENT_SELECTOR = '#@'
|
||||
|
||||
|
||||
def _extract_expressions(node):
|
||||
"""Find expressions in a call to _TRANSIENT_FUNCTION and extract them.
|
||||
|
||||
The function walks the AST recursively to search for expressions that
|
||||
are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
|
||||
expression, it completely removes the function call node from the tree,
|
||||
replacing it by the wrapped expression inside the parent.
|
||||
|
||||
:param node: An astroid node.
|
||||
:type node: astroid.bases.NodeNG
|
||||
:yields: The sequence of wrapped expressions on the modified tree
|
||||
expression can be found.
|
||||
"""
|
||||
if (isinstance(node, nodes.CallFunc)
|
||||
and isinstance(node.func, nodes.Name)
|
||||
and node.func.name == _TRANSIENT_FUNCTION):
|
||||
real_expr = node.args[0]
|
||||
real_expr.parent = node.parent
|
||||
# Search for node in all _astng_fields (the fields checked when
|
||||
# get_children is called) of its parent. Some of those fields may
|
||||
# be lists or tuples, in which case the elements need to be checked.
|
||||
# When we find it, replace it by real_expr, so that the AST looks
|
||||
# like no call to _TRANSIENT_FUNCTION ever took place.
|
||||
for name in node.parent._astroid_fields:
|
||||
child = getattr(node.parent, name)
|
||||
if isinstance(child, (list, tuple)):
|
||||
for idx, compound_child in enumerate(child):
|
||||
if compound_child is node:
|
||||
child[idx] = real_expr
|
||||
elif child is node:
|
||||
setattr(node.parent, name, real_expr)
|
||||
yield real_expr
|
||||
else:
|
||||
for child in node.get_children():
|
||||
for result in _extract_expressions(child):
|
||||
yield result
|
||||
|
||||
|
||||
def _find_statement_by_line(node, line):
|
||||
"""Extracts the statement on a specific line from an AST.
|
||||
|
||||
If the line number of node matches line, it will be returned;
|
||||
otherwise its children are iterated and the function is called
|
||||
recursively.
|
||||
|
||||
:param node: An astroid node.
|
||||
:type node: astroid.bases.NodeNG
|
||||
:param line: The line number of the statement to extract.
|
||||
:type line: int
|
||||
:returns: The statement on the line, or None if no statement for the line
|
||||
can be found.
|
||||
:rtype: astroid.bases.NodeNG or None
|
||||
"""
|
||||
if isinstance(node, (nodes.Class, nodes.Function)):
|
||||
# This is an inaccuracy in the AST: the nodes that can be
|
||||
# decorated do not carry explicit information on which line
|
||||
# the actual definition (class/def), but .fromline seems to
|
||||
# be close enough.
|
||||
node_line = node.fromlineno
|
||||
else:
|
||||
node_line = node.lineno
|
||||
|
||||
if node_line == line:
|
||||
return node
|
||||
|
||||
for child in node.get_children():
|
||||
result = _find_statement_by_line(child, line)
|
||||
if result:
|
||||
return result
|
||||
|
||||
return None
|
||||
|
||||
def extract_node(code, module_name=''):
|
||||
"""Parses some Python code as a module and extracts a designated AST node.
|
||||
|
||||
Statements:
|
||||
To extract one or more statement nodes, append #@ to the end of the line
|
||||
|
||||
Examples:
|
||||
>>> def x():
|
||||
>>> def y():
|
||||
>>> return 1 #@
|
||||
|
||||
The return statement will be extracted.
|
||||
|
||||
>>> class X(object):
|
||||
>>> def meth(self): #@
|
||||
>>> pass
|
||||
|
||||
The funcion object 'meth' will be extracted.
|
||||
|
||||
Expressions:
|
||||
To extract arbitrary expressions, surround them with the fake
|
||||
function call __(...). After parsing, the surrounded expression
|
||||
will be returned and the whole AST (accessible via the returned
|
||||
node's parent attribute) will look like the function call was
|
||||
never there in the first place.
|
||||
|
||||
Examples:
|
||||
>>> a = __(1)
|
||||
|
||||
The const node will be extracted.
|
||||
|
||||
>>> def x(d=__(foo.bar)): pass
|
||||
|
||||
The node containing the default argument will be extracted.
|
||||
|
||||
>>> def foo(a, b):
|
||||
>>> return 0 < __(len(a)) < b
|
||||
|
||||
The node containing the function call 'len' will be extracted.
|
||||
|
||||
If no statements or expressions are selected, the last toplevel
|
||||
statement will be returned.
|
||||
|
||||
If the selected statement is a discard statement, (i.e. an expression
|
||||
turned into a statement), the wrapped expression is returned instead.
|
||||
|
||||
For convenience, singleton lists are unpacked.
|
||||
|
||||
:param str code: A piece of Python code that is parsed as
|
||||
a module. Will be passed through textwrap.dedent first.
|
||||
:param str module_name: The name of the module.
|
||||
:returns: The designated node from the parse tree, or a list of nodes.
|
||||
:rtype: astroid.bases.NodeNG, or a list of nodes.
|
||||
"""
|
||||
def _extract(node):
|
||||
if isinstance(node, nodes.Discard):
|
||||
return node.value
|
||||
else:
|
||||
return node
|
||||
|
||||
requested_lines = []
|
||||
for idx, line in enumerate(code.splitlines()):
|
||||
if line.strip().endswith(_STATEMENT_SELECTOR):
|
||||
requested_lines.append(idx + 1)
|
||||
|
||||
tree = build_module(code, module_name=module_name)
|
||||
extracted = []
|
||||
if requested_lines:
|
||||
for line in requested_lines:
|
||||
extracted.append(_find_statement_by_line(tree, line))
|
||||
|
||||
# Modifies the tree.
|
||||
extracted.extend(_extract_expressions(tree))
|
||||
|
||||
if not extracted:
|
||||
extracted.append(tree.body[-1])
|
||||
|
||||
extracted = [_extract(node) for node in extracted]
|
||||
if len(extracted) == 1:
|
||||
return extracted[0]
|
||||
else:
|
||||
return extracted
|
||||
|
||||
|
||||
def build_module(code, module_name='', path=None):
|
||||
"""Parses a string module with a builder.
|
||||
:param code: The code for the module.
|
||||
:type code: str
|
||||
:param module_name: The name for the module
|
||||
:type module_name: str
|
||||
:param path: The path for the module
|
||||
:type module_name: str
|
||||
:returns: The module AST.
|
||||
:rtype: astroid.bases.NodeNG
|
||||
"""
|
||||
code = textwrap.dedent(code)
|
||||
return builder.AstroidBuilder(None).string_build(code, modname=module_name, path=path)
|
||||
|
||||
|
||||
def require_version(minver=None, maxver=None):
|
||||
""" Compare version of python interpreter to the given one. Skip the test
|
||||
if older.
|
||||
"""
|
||||
def parse(string, default=None):
|
||||
string = string or default
|
||||
try:
|
||||
return tuple(int(v) for v in string.split('.'))
|
||||
except ValueError:
|
||||
raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version)
|
||||
|
||||
def check_require_version(f):
|
||||
current = sys.version_info[:3]
|
||||
if parse(minver, "0") < current <= parse(maxver, "4"):
|
||||
return f
|
||||
else:
|
||||
str_version = '.'.join(str(v) for v in sys.version_info)
|
||||
@functools.wraps(f)
|
||||
def new_f(self, *args, **kwargs):
|
||||
if minver is not None:
|
||||
self.skipTest('Needs Python > %s. Current version is %s.' % (minver, str_version))
|
||||
elif maxver is not None:
|
||||
self.skipTest('Needs Python <= %s. Current version is %s.' % (maxver, str_version))
|
||||
return new_f
|
||||
|
||||
|
||||
return check_require_version
|
||||
|
||||
def get_name_node(start_from, name, index=0):
|
||||
return [n for n in start_from.nodes_of_class(nodes.Name) if n.name == name][index]
|
||||
|
|
@ -1,239 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of astroid.
|
||||
#
|
||||
# astroid is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation, either version 2.1 of the License, or (at your
|
||||
# option) any later version.
|
||||
#
|
||||
# astroid is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
# for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with astroid. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""this module contains some utilities to navigate in the tree or to
|
||||
extract information from it
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
from astroid.exceptions import AstroidBuildingException
|
||||
from astroid.builder import parse
|
||||
|
||||
|
||||
class ASTWalker(object):
|
||||
"""a walker visiting a tree in preorder, calling on the handler:
|
||||
|
||||
* visit_<class name> on entering a node, where class name is the class of
|
||||
the node in lower case
|
||||
|
||||
* leave_<class name> on leaving a node, where class name is the class of
|
||||
the node in lower case
|
||||
"""
|
||||
|
||||
def __init__(self, handler):
|
||||
self.handler = handler
|
||||
self._cache = {}
|
||||
|
||||
def walk(self, node, _done=None):
|
||||
"""walk on the tree from <node>, getting callbacks from handler"""
|
||||
if _done is None:
|
||||
_done = set()
|
||||
if node in _done:
|
||||
raise AssertionError((id(node), node, node.parent))
|
||||
_done.add(node)
|
||||
self.visit(node)
|
||||
for child_node in node.get_children():
|
||||
self.handler.set_context(node, child_node)
|
||||
assert child_node is not node
|
||||
self.walk(child_node, _done)
|
||||
self.leave(node)
|
||||
assert node.parent is not node
|
||||
|
||||
def get_callbacks(self, node):
|
||||
"""get callbacks from handler for the visited node"""
|
||||
klass = node.__class__
|
||||
methods = self._cache.get(klass)
|
||||
if methods is None:
|
||||
handler = self.handler
|
||||
kid = klass.__name__.lower()
|
||||
e_method = getattr(handler, 'visit_%s' % kid,
|
||||
getattr(handler, 'visit_default', None))
|
||||
l_method = getattr(handler, 'leave_%s' % kid,
|
||||
getattr(handler, 'leave_default', None))
|
||||
self._cache[klass] = (e_method, l_method)
|
||||
else:
|
||||
e_method, l_method = methods
|
||||
return e_method, l_method
|
||||
|
||||
def visit(self, node):
|
||||
"""walk on the tree from <node>, getting callbacks from handler"""
|
||||
method = self.get_callbacks(node)[0]
|
||||
if method is not None:
|
||||
method(node)
|
||||
|
||||
def leave(self, node):
|
||||
"""walk on the tree from <node>, getting callbacks from handler"""
|
||||
method = self.get_callbacks(node)[1]
|
||||
if method is not None:
|
||||
method(node)
|
||||
|
||||
|
||||
class LocalsVisitor(ASTWalker):
|
||||
"""visit a project by traversing the locals dictionary"""
|
||||
def __init__(self):
|
||||
ASTWalker.__init__(self, self)
|
||||
self._visited = {}
|
||||
|
||||
def visit(self, node):
|
||||
"""launch the visit starting from the given node"""
|
||||
if node in self._visited:
|
||||
return
|
||||
self._visited[node] = 1 # FIXME: use set ?
|
||||
methods = self.get_callbacks(node)
|
||||
if methods[0] is not None:
|
||||
methods[0](node)
|
||||
if 'locals' in node.__dict__: # skip Instance and other proxy
|
||||
for local_node in node.values():
|
||||
self.visit(local_node)
|
||||
if methods[1] is not None:
|
||||
return methods[1](node)
|
||||
|
||||
|
||||
def _check_children(node):
|
||||
"""a helper function to check children - parent relations"""
|
||||
for child in node.get_children():
|
||||
ok = False
|
||||
if child is None:
|
||||
print("Hm, child of %s is None" % node)
|
||||
continue
|
||||
if not hasattr(child, 'parent'):
|
||||
print(" ERROR: %s has child %s %x with no parent" % (
|
||||
node, child, id(child)))
|
||||
elif not child.parent:
|
||||
print(" ERROR: %s has child %s %x with parent %r" % (
|
||||
node, child, id(child), child.parent))
|
||||
elif child.parent is not node:
|
||||
print(" ERROR: %s %x has child %s %x with wrong parent %s" % (
|
||||
node, id(node), child, id(child), child.parent))
|
||||
else:
|
||||
ok = True
|
||||
if not ok:
|
||||
print("lines;", node.lineno, child.lineno)
|
||||
print("of module", node.root(), node.root().name)
|
||||
raise AstroidBuildingException
|
||||
_check_children(child)
|
||||
|
||||
|
||||
class TreeTester(object):
|
||||
'''A helper class to see _ast tree and compare with astroid tree
|
||||
|
||||
indent: string for tree indent representation
|
||||
lineno: bool to tell if we should print the line numbers
|
||||
|
||||
>>> tester = TreeTester('print')
|
||||
>>> print tester.native_tree_repr()
|
||||
|
||||
<Module>
|
||||
. body = [
|
||||
. <Print>
|
||||
. . nl = True
|
||||
. ]
|
||||
>>> print tester.astroid_tree_repr()
|
||||
Module()
|
||||
body = [
|
||||
Print()
|
||||
dest =
|
||||
values = [
|
||||
]
|
||||
]
|
||||
'''
|
||||
|
||||
indent = '. '
|
||||
lineno = False
|
||||
|
||||
def __init__(self, sourcecode):
|
||||
self._string = ''
|
||||
self.sourcecode = sourcecode
|
||||
self._ast_node = None
|
||||
self.build_ast()
|
||||
|
||||
def build_ast(self):
|
||||
"""build the _ast tree from the source code"""
|
||||
self._ast_node = parse(self.sourcecode)
|
||||
|
||||
def native_tree_repr(self, node=None, indent=''):
|
||||
"""get a nice representation of the _ast tree"""
|
||||
self._string = ''
|
||||
if node is None:
|
||||
node = self._ast_node
|
||||
self._native_repr_tree(node, indent)
|
||||
return self._string
|
||||
|
||||
|
||||
def _native_repr_tree(self, node, indent, _done=None):
|
||||
"""recursive method for the native tree representation"""
|
||||
from _ast import Load as _Load, Store as _Store, Del as _Del
|
||||
from _ast import AST as Node
|
||||
if _done is None:
|
||||
_done = set()
|
||||
if node in _done:
|
||||
self._string += '\nloop in tree: %r (%s)' % (
|
||||
node, getattr(node, 'lineno', None))
|
||||
return
|
||||
_done.add(node)
|
||||
self._string += '\n' + indent + '<%s>' % node.__class__.__name__
|
||||
indent += self.indent
|
||||
if not hasattr(node, '__dict__'):
|
||||
self._string += '\n' + self.indent + " ** node has no __dict__ " + str(node)
|
||||
return
|
||||
node_dict = node.__dict__
|
||||
if hasattr(node, '_attributes'):
|
||||
for a in node._attributes:
|
||||
attr = node_dict[a]
|
||||
if attr is None:
|
||||
continue
|
||||
if a in ("lineno", "col_offset") and not self.lineno:
|
||||
continue
|
||||
self._string += '\n' + indent + a + " = " + repr(attr)
|
||||
for field in node._fields or ():
|
||||
attr = node_dict[field]
|
||||
if attr is None:
|
||||
continue
|
||||
if isinstance(attr, list):
|
||||
if not attr:
|
||||
continue
|
||||
self._string += '\n' + indent + field + ' = ['
|
||||
for elt in attr:
|
||||
self._native_repr_tree(elt, indent, _done)
|
||||
self._string += '\n' + indent + ']'
|
||||
continue
|
||||
if isinstance(attr, (_Load, _Store, _Del)):
|
||||
continue
|
||||
if isinstance(attr, Node):
|
||||
self._string += '\n' + indent + field + " = "
|
||||
self._native_repr_tree(attr, indent, _done)
|
||||
else:
|
||||
self._string += '\n' + indent + field + " = " + repr(attr)
|
||||
|
||||
|
||||
def build_astroid_tree(self):
|
||||
"""build astroid tree from the _ast tree
|
||||
"""
|
||||
from astroid.builder import AstroidBuilder
|
||||
tree = AstroidBuilder().string_build(self.sourcecode)
|
||||
return tree
|
||||
|
||||
def astroid_tree_repr(self, ids=False):
|
||||
"""build the astroid tree and return a nice tree representation"""
|
||||
mod = self.build_astroid_tree()
|
||||
return mod.repr_tree(ids)
|
||||
|
||||
|
||||
__all__ = ('LocalsVisitor', 'ASTWalker',)
|
||||
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
"""Run the EasyInstall command"""
|
||||
|
||||
if __name__ == '__main__':
|
||||
from setuptools.command.easy_install import main
|
||||
main()
|
||||
Binary file not shown.
|
|
@ -1,184 +0,0 @@
|
|||
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Logilab common library (aka Logilab's extension to the standard library).
|
||||
|
||||
:type STD_BLACKLIST: tuple
|
||||
:var STD_BLACKLIST: directories ignored by default by the functions in
|
||||
this package which have to recurse into directories
|
||||
|
||||
:type IGNORED_EXTENSIONS: tuple
|
||||
:var IGNORED_EXTENSIONS: file extensions that may usually be ignored
|
||||
"""
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
import types
|
||||
import pkg_resources
|
||||
|
||||
__version__ = pkg_resources.get_distribution('logilab-common').version
|
||||
|
||||
# deprecated, but keep compatibility with pylint < 1.4.4
|
||||
__pkginfo__ = types.ModuleType('__pkginfo__')
|
||||
__pkginfo__.__package__ = __name__
|
||||
__pkginfo__.version = __version__
|
||||
sys.modules['logilab.common.__pkginfo__'] = __pkginfo__
|
||||
|
||||
STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build')
|
||||
|
||||
IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~', '.swp', '.orig')
|
||||
|
||||
# set this to False if you've mx DateTime installed but you don't want your db
|
||||
# adapter to use it (should be set before you got a connection)
|
||||
USE_MX_DATETIME = True
|
||||
|
||||
|
||||
class attrdict(dict):
|
||||
"""A dictionary for which keys are also accessible as attributes."""
|
||||
def __getattr__(self, attr):
|
||||
try:
|
||||
return self[attr]
|
||||
except KeyError:
|
||||
raise AttributeError(attr)
|
||||
|
||||
class dictattr(dict):
|
||||
def __init__(self, proxy):
|
||||
self.__proxy = proxy
|
||||
|
||||
def __getitem__(self, attr):
|
||||
try:
|
||||
return getattr(self.__proxy, attr)
|
||||
except AttributeError:
|
||||
raise KeyError(attr)
|
||||
|
||||
class nullobject(object):
|
||||
def __repr__(self):
|
||||
return '<nullobject>'
|
||||
def __bool__(self):
|
||||
return False
|
||||
__nonzero__ = __bool__
|
||||
|
||||
class tempattr(object):
|
||||
def __init__(self, obj, attr, value):
|
||||
self.obj = obj
|
||||
self.attr = attr
|
||||
self.value = value
|
||||
|
||||
def __enter__(self):
|
||||
self.oldvalue = getattr(self.obj, self.attr)
|
||||
setattr(self.obj, self.attr, self.value)
|
||||
return self.obj
|
||||
|
||||
def __exit__(self, exctype, value, traceback):
|
||||
setattr(self.obj, self.attr, self.oldvalue)
|
||||
|
||||
|
||||
|
||||
# flatten -----
|
||||
# XXX move in a specific module and use yield instead
|
||||
# do not mix flatten and translate
|
||||
#
|
||||
# def iterable(obj):
|
||||
# try: iter(obj)
|
||||
# except: return False
|
||||
# return True
|
||||
#
|
||||
# def is_string_like(obj):
|
||||
# try: obj +''
|
||||
# except (TypeError, ValueError): return False
|
||||
# return True
|
||||
#
|
||||
#def is_scalar(obj):
|
||||
# return is_string_like(obj) or not iterable(obj)
|
||||
#
|
||||
#def flatten(seq):
|
||||
# for item in seq:
|
||||
# if is_scalar(item):
|
||||
# yield item
|
||||
# else:
|
||||
# for subitem in flatten(item):
|
||||
# yield subitem
|
||||
|
||||
def flatten(iterable, tr_func=None, results=None):
|
||||
"""Flatten a list of list with any level.
|
||||
|
||||
If tr_func is not None, it should be a one argument function that'll be called
|
||||
on each final element.
|
||||
|
||||
:rtype: list
|
||||
|
||||
>>> flatten([1, [2, 3]])
|
||||
[1, 2, 3]
|
||||
"""
|
||||
if results is None:
|
||||
results = []
|
||||
for val in iterable:
|
||||
if isinstance(val, (list, tuple)):
|
||||
flatten(val, tr_func, results)
|
||||
elif tr_func is None:
|
||||
results.append(val)
|
||||
else:
|
||||
results.append(tr_func(val))
|
||||
return results
|
||||
|
||||
|
||||
# XXX is function below still used ?
|
||||
|
||||
def make_domains(lists):
|
||||
"""
|
||||
Given a list of lists, return a list of domain for each list to produce all
|
||||
combinations of possibles values.
|
||||
|
||||
:rtype: list
|
||||
|
||||
Example:
|
||||
|
||||
>>> make_domains(['a', 'b'], ['c','d', 'e'])
|
||||
[['a', 'b', 'a', 'b', 'a', 'b'], ['c', 'c', 'd', 'd', 'e', 'e']]
|
||||
"""
|
||||
from six.moves import range
|
||||
domains = []
|
||||
for iterable in lists:
|
||||
new_domain = iterable[:]
|
||||
for i in range(len(domains)):
|
||||
domains[i] = domains[i]*len(iterable)
|
||||
if domains:
|
||||
missing = (len(domains[0]) - len(iterable)) / len(iterable)
|
||||
i = 0
|
||||
for j in range(len(iterable)):
|
||||
value = iterable[j]
|
||||
for dummy in range(missing):
|
||||
new_domain.insert(i, value)
|
||||
i += 1
|
||||
i += 1
|
||||
domains.append(new_domain)
|
||||
return domains
|
||||
|
||||
|
||||
# private stuff ################################################################
|
||||
|
||||
def _handle_blacklist(blacklist, dirnames, filenames):
|
||||
"""remove files/directories in the black list
|
||||
|
||||
dirnames/filenames are usually from os.walk
|
||||
"""
|
||||
for norecurs in blacklist:
|
||||
if norecurs in dirnames:
|
||||
dirnames.remove(norecurs)
|
||||
elif norecurs in filenames:
|
||||
filenames.remove(norecurs)
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -1,114 +0,0 @@
|
|||
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Cache module, with a least recently used algorithm for the management of the
|
||||
deletion of entries.
|
||||
|
||||
|
||||
|
||||
|
||||
"""
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
from threading import Lock
|
||||
|
||||
from logilab.common.decorators import locked
|
||||
|
||||
_marker = object()
|
||||
|
||||
class Cache(dict):
|
||||
"""A dictionary like cache.
|
||||
|
||||
inv:
|
||||
len(self._usage) <= self.size
|
||||
len(self.data) <= self.size
|
||||
"""
|
||||
|
||||
def __init__(self, size=100):
|
||||
""" Warning : Cache.__init__() != dict.__init__().
|
||||
Constructor does not take any arguments beside size.
|
||||
"""
|
||||
assert size >= 0, 'cache size must be >= 0 (0 meaning no caching)'
|
||||
self.size = size
|
||||
self._usage = []
|
||||
self._lock = Lock()
|
||||
super(Cache, self).__init__()
|
||||
|
||||
def _acquire(self):
|
||||
self._lock.acquire()
|
||||
|
||||
def _release(self):
|
||||
self._lock.release()
|
||||
|
||||
def _update_usage(self, key):
|
||||
if not self._usage:
|
||||
self._usage.append(key)
|
||||
elif self._usage[-1] != key:
|
||||
try:
|
||||
self._usage.remove(key)
|
||||
except ValueError:
|
||||
# we are inserting a new key
|
||||
# check the size of the dictionary
|
||||
# and remove the oldest item in the cache
|
||||
if self.size and len(self._usage) >= self.size:
|
||||
super(Cache, self).__delitem__(self._usage[0])
|
||||
del self._usage[0]
|
||||
self._usage.append(key)
|
||||
else:
|
||||
pass # key is already the most recently used key
|
||||
|
||||
def __getitem__(self, key):
|
||||
value = super(Cache, self).__getitem__(key)
|
||||
self._update_usage(key)
|
||||
return value
|
||||
__getitem__ = locked(_acquire, _release)(__getitem__)
|
||||
|
||||
def __setitem__(self, key, item):
|
||||
# Just make sure that size > 0 before inserting a new item in the cache
|
||||
if self.size > 0:
|
||||
super(Cache, self).__setitem__(key, item)
|
||||
self._update_usage(key)
|
||||
__setitem__ = locked(_acquire, _release)(__setitem__)
|
||||
|
||||
def __delitem__(self, key):
|
||||
super(Cache, self).__delitem__(key)
|
||||
self._usage.remove(key)
|
||||
__delitem__ = locked(_acquire, _release)(__delitem__)
|
||||
|
||||
def clear(self):
|
||||
super(Cache, self).clear()
|
||||
self._usage = []
|
||||
clear = locked(_acquire, _release)(clear)
|
||||
|
||||
def pop(self, key, default=_marker):
|
||||
if key in self:
|
||||
self._usage.remove(key)
|
||||
#if default is _marker:
|
||||
# return super(Cache, self).pop(key)
|
||||
return super(Cache, self).pop(key, default)
|
||||
pop = locked(_acquire, _release)(pop)
|
||||
|
||||
def popitem(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def setdefault(self, key, default=None):
|
||||
raise NotImplementedError()
|
||||
|
||||
def update(self, other):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
|
|
@ -1,238 +0,0 @@
|
|||
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Manipulation of upstream change log files.
|
||||
|
||||
The upstream change log files format handled is simpler than the one
|
||||
often used such as those generated by the default Emacs changelog mode.
|
||||
|
||||
Sample ChangeLog format::
|
||||
|
||||
Change log for project Yoo
|
||||
==========================
|
||||
|
||||
--
|
||||
* add a new functionality
|
||||
|
||||
2002-02-01 -- 0.1.1
|
||||
* fix bug #435454
|
||||
* fix bug #434356
|
||||
|
||||
2002-01-01 -- 0.1
|
||||
* initial release
|
||||
|
||||
|
||||
There is 3 entries in this change log, one for each released version and one
|
||||
for the next version (i.e. the current entry).
|
||||
Each entry contains a set of messages corresponding to changes done in this
|
||||
release.
|
||||
All the non empty lines before the first entry are considered as the change
|
||||
log title.
|
||||
"""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
from stat import S_IWRITE
|
||||
|
||||
from six import string_types
|
||||
|
||||
BULLET = '*'
|
||||
SUBBULLET = '-'
|
||||
INDENT = ' ' * 4
|
||||
|
||||
class NoEntry(Exception):
|
||||
"""raised when we are unable to find an entry"""
|
||||
|
||||
class EntryNotFound(Exception):
|
||||
"""raised when we are unable to find a given entry"""
|
||||
|
||||
class Version(tuple):
|
||||
"""simple class to handle soft version number has a tuple while
|
||||
correctly printing it as X.Y.Z
|
||||
"""
|
||||
def __new__(cls, versionstr):
|
||||
if isinstance(versionstr, string_types):
|
||||
versionstr = versionstr.strip(' :') # XXX (syt) duh?
|
||||
parsed = cls.parse(versionstr)
|
||||
else:
|
||||
parsed = versionstr
|
||||
return tuple.__new__(cls, parsed)
|
||||
|
||||
@classmethod
|
||||
def parse(cls, versionstr):
|
||||
versionstr = versionstr.strip(' :')
|
||||
try:
|
||||
return [int(i) for i in versionstr.split('.')]
|
||||
except ValueError as ex:
|
||||
raise ValueError("invalid literal for version '%s' (%s)"%(versionstr, ex))
|
||||
|
||||
def __str__(self):
|
||||
return '.'.join([str(i) for i in self])
|
||||
|
||||
# upstream change log #########################################################
|
||||
|
||||
class ChangeLogEntry(object):
|
||||
"""a change log entry, i.e. a set of messages associated to a version and
|
||||
its release date
|
||||
"""
|
||||
version_class = Version
|
||||
|
||||
def __init__(self, date=None, version=None, **kwargs):
|
||||
self.__dict__.update(kwargs)
|
||||
if version:
|
||||
self.version = self.version_class(version)
|
||||
else:
|
||||
self.version = None
|
||||
self.date = date
|
||||
self.messages = []
|
||||
|
||||
def add_message(self, msg):
|
||||
"""add a new message"""
|
||||
self.messages.append(([msg], []))
|
||||
|
||||
def complete_latest_message(self, msg_suite):
|
||||
"""complete the latest added message
|
||||
"""
|
||||
if not self.messages:
|
||||
raise ValueError('unable to complete last message as there is no previous message)')
|
||||
if self.messages[-1][1]: # sub messages
|
||||
self.messages[-1][1][-1].append(msg_suite)
|
||||
else: # message
|
||||
self.messages[-1][0].append(msg_suite)
|
||||
|
||||
def add_sub_message(self, sub_msg, key=None):
|
||||
if not self.messages:
|
||||
raise ValueError('unable to complete last message as there is no previous message)')
|
||||
if key is None:
|
||||
self.messages[-1][1].append([sub_msg])
|
||||
else:
|
||||
raise NotImplementedError("sub message to specific key are not implemented yet")
|
||||
|
||||
def write(self, stream=sys.stdout):
|
||||
"""write the entry to file """
|
||||
stream.write('%s -- %s\n' % (self.date or '', self.version or ''))
|
||||
for msg, sub_msgs in self.messages:
|
||||
stream.write('%s%s %s\n' % (INDENT, BULLET, msg[0]))
|
||||
stream.write(''.join(msg[1:]))
|
||||
if sub_msgs:
|
||||
stream.write('\n')
|
||||
for sub_msg in sub_msgs:
|
||||
stream.write('%s%s %s\n' % (INDENT * 2, SUBBULLET, sub_msg[0]))
|
||||
stream.write(''.join(sub_msg[1:]))
|
||||
stream.write('\n')
|
||||
|
||||
stream.write('\n\n')
|
||||
|
||||
class ChangeLog(object):
    """object representation of a whole ChangeLog file"""

    # entry factory; subclasses may substitute their own entry type
    entry_class = ChangeLogEntry

    def __init__(self, changelog_file, title=''):
        self.file = changelog_file      # path of the backing file
        self.title = title              # free text before the first entry
        self.additional_content = ''    # text kept verbatim between entries
        self.entries = []               # parsed entry_class instances
        self.load()

    def __repr__(self):
        return '<ChangeLog %s at %s (%s entries)>' % (self.file, id(self),
                                                      len(self.entries))

    def add_entry(self, entry):
        """add a new entry to the change log"""
        self.entries.append(entry)

    def get_entry(self, version='', create=None):
        """ return a given changelog entry
        if version is omitted, return the current entry

        When *create* is given and no suitable entry exists, a fresh
        (unreleased) entry is opened at the front of the list.
        """
        if not self.entries:
            if version or not create:
                raise NoEntry()
            self.entries.append(self.entry_class())
        if not version:
            # latest entry already carries a version -> open a new one
            if self.entries[0].version and create is not None:
                self.entries.insert(0, self.entry_class())
            return self.entries[0]
        # NOTE(review): version_class is expected to be provided by a
        # subclass or mixin -- not defined in this class itself
        version = self.version_class(version)
        for entry in self.entries:
            if entry.version == version:
                return entry
        raise EntryNotFound()

    def add(self, msg, create=None):
        """add a new message to the latest opened entry"""
        entry = self.get_entry(create=create)
        entry.add_message(msg)

    def load(self):
        """ read a logilab's ChangeLog from file """
        # a missing file is silently treated as an empty changelog
        try:
            stream = open(self.file)
        except IOError:
            return
        last = None          # entry currently being filled
        expect_sub = False   # True right after a non-message line
        for line in stream.readlines():
            sline = line.strip()
            words = sline.split()
            # if new entry
            if len(words) == 1 and words[0] == '--':
                expect_sub = False
                last = self.entry_class()
                self.add_entry(last)
            # if old entry
            elif len(words) == 3 and words[1] == '--':
                expect_sub = False
                last = self.entry_class(words[0], words[2])
                self.add_entry(last)
            # if title
            elif sline and last is None:
                self.title = '%s%s' % (self.title, line)
            # if new entry
            elif sline and sline[0] == BULLET:
                expect_sub = False
                last.add_message(sline[1:].strip())
            # if new sub_entry
            elif expect_sub and sline and sline[0] == SUBBULLET:
                last.add_sub_message(sline[1:].strip())
            # if new line for current entry
            elif sline and last.messages:
                last.complete_latest_message(line)
            else:
                expect_sub = True
                self.additional_content += line
        stream.close()

    def format_title(self):
        return '%s\n\n' % self.title.strip()

    def save(self):
        """write back change log"""
        # filetutils isn't importable in appengine, so import locally
        from logilab.common.fileutils import ensure_fs_mode
        ensure_fs_mode(self.file, S_IWRITE)
        self.write(open(self.file, 'w'))

    def write(self, stream=sys.stdout):
        """write changelog to stream"""
        stream.write(self.format_title())
        for entry in self.entries:
            entry.write(stream)
|
||||
|
||||
|
|
@ -1,334 +0,0 @@
|
|||
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Helper functions to support command line tools providing more than
|
||||
one command.
|
||||
|
||||
e.g called as "tool command [options] args..." where <options> and <args> are
|
||||
command'specific
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
import logging
|
||||
from os.path import basename
|
||||
|
||||
from logilab.common.configuration import Configuration
|
||||
from logilab.common.logging_ext import init_log, get_threshold
|
||||
from logilab.common.deprecation import deprecated
|
||||
|
||||
|
||||
class BadCommandUsage(Exception):
    """Raised when an unknown command is used or when a command is not
    correctly used (bad options, too much / missing arguments...).

    Trigger display of command usage.
    """
|
||||
|
||||
class CommandError(Exception):
    """Raised when a command can't be processed and we want to display it and
    exit, without traceback nor usage displayed.
    """
|
||||
|
||||
|
||||
# command line access point ####################################################
|
||||
|
||||
class CommandLine(dict):
    """Usage:

    >>> LDI = cli.CommandLine('ldi', doc='Logilab debian installer',
                              version=version, rcfile=RCFILE)
    >>> LDI.register(MyCommandClass)
    >>> LDI.register(MyOtherCommandClass)
    >>> LDI.run(sys.argv[1:])

    Arguments:

    * `pgm`, the program name, default to `basename(sys.argv[0])`

    * `doc`, a short description of the command line tool

    * `copyright`, additional doc string that will be appended to the generated
      doc

    * `version`, version number or string of the tool. If specified, global
      -v/--version option will be available.

    * `rcfile`, path to a configuration file. If specified, global -C/--rc-file
      option will be available.

    * `logger`, logger to propagate to commands, default to
      `logging.getLogger(self.pgm))`
    """
    def __init__(self, pgm=None, doc=None, copyright=None, version=None,
                 rcfile=None, logthreshold=logging.ERROR,
                 check_duplicated_command=True):
        if pgm is None:
            pgm = basename(sys.argv[0])
        self.pgm = pgm
        self.doc = doc
        self.copyright = copyright
        self.version = version
        self.rcfile = rcfile
        self.logger = None
        self.logthreshold = logthreshold
        self.check_duplicated_command = check_duplicated_command

    def register(self, cls, force=False):
        """register the given :class:`Command` subclass"""
        assert not self.check_duplicated_command or force or not cls.name in self, \
            'a command %s is already defined' % cls.name
        self[cls.name] = cls
        return cls

    def run(self, args):
        """main command line access point:
        * init logging
        * handle global options (-h/--help, -v/--version, -C/--rc-file)
        * check command
        * run command

        Terminate by :exc:`SystemExit`
        """
        init_log(debug=True, # so that we use StreamHandler
                 logthreshold=self.logthreshold,
                 logformat='%(levelname)s: %(message)s')
        try:
            arg = args.pop(0)
        except IndexError:
            self.usage_and_exit(1)
        if arg in ('-h', '--help'):
            self.usage_and_exit(0)
        # BUGFIX: ``arg in ('--version')`` was substring membership in the
        # *string* '--version' (so e.g. 'si', 'on' or even '-' matched);
        # use a proper tuple and accept '-v' as advertised by usage()
        if self.version is not None and arg in ('-v', '--version'):
            print(self.version)
            sys.exit(0)
        rcfile = self.rcfile
        if rcfile is not None and arg in ('-C', '--rc-file'):
            try:
                rcfile = args.pop(0)
                arg = args.pop(0)
            except IndexError:
                self.usage_and_exit(1)
        try:
            command = self.get_command(arg)
        except KeyError:
            print('ERROR: no %s command' % arg)
            print()
            self.usage_and_exit(1)
        try:
            sys.exit(command.main_run(args, rcfile))
        except KeyboardInterrupt as exc:
            print('Interrupted', end=' ')
            if str(exc):
                print(': %s' % exc, end=' ')
            print()
            sys.exit(4)
        except BadCommandUsage as err:
            print('ERROR:', err)
            print()
            print(command.help())
            sys.exit(1)

    def create_logger(self, handler, logthreshold=None):
        """Return a fresh logger for this program wired to *handler*."""
        logger = logging.Logger(self.pgm)
        logger.handlers = [handler]
        if logthreshold is None:
            logthreshold = get_threshold(self.logthreshold)
        logger.setLevel(logthreshold)
        return logger

    def get_command(self, cmd, logger=None):
        """Instantiate and return the registered command named *cmd*,
        creating (and caching) a default logger on first use.
        """
        if logger is None:
            logger = self.logger
        if logger is None:
            logger = self.logger = logging.getLogger(self.pgm)
            logger.setLevel(get_threshold(self.logthreshold))
        return self[cmd](logger)

    def usage(self):
        """display usage for the main program (i.e. when no command supplied)
        and exit
        """
        print('usage:', self.pgm, end=' ')
        if self.rcfile:
            print('[--rc-file=<configuration file>]', end=' ')
        print('<command> [options] <command argument>...')
        if self.doc:
            print('\n%s' % self.doc)
        print('''
Type "%(pgm)s <command> --help" for more information about a specific
command. Available commands are :\n''' % self.__dict__)
        max_len = max([len(cmd) for cmd in self])
        padding = ' ' * max_len
        for cmdname, cmd in sorted(self.items()):
            if not cmd.hidden:
                print(' ', (cmdname + padding)[:max_len], cmd.short_description())
        if self.rcfile:
            print('''
Use --rc-file=<configuration file> / -C <configuration file> before the command
to specify a configuration file. Default to %s.
''' % self.rcfile)
        print('''%(pgm)s -h/--help
display this usage information and exit''' % self.__dict__)
        if self.version:
            print('''%(pgm)s -v/--version
display version configuration and exit''' % self.__dict__)
        if self.copyright:
            print('\n', self.copyright)

    def usage_and_exit(self, status):
        """Print the global usage message and terminate with *status*."""
        self.usage()
        sys.exit(status)
|
||||
|
||||
|
||||
# base command classes #########################################################
|
||||
|
||||
class Command(Configuration):
    """Base class for command line commands.

    Class attributes:

    * `name`, the name of the command

    * `min_args`, minimum number of arguments, None if unspecified

    * `max_args`, maximum number of arguments, None if unspecified

    * `arguments`, string describing arguments, used in command usage

    * `hidden`, boolean flag telling if the command should be hidden, e.g. does
      not appear in help's commands list

    * `options`, options list, as allowed by :mod:configuration
    """

    arguments = ''
    name = ''
    # hidden from help ?
    hidden = False
    # max/min args, None meaning unspecified
    min_args = None
    max_args = None

    @classmethod
    def description(cls):
        # BUGFIX: strip the 4-space docstring indentation only; replacing a
        # *single* space deleted every space from the description, producing
        # unreadable one-word output in usage listings
        return cls.__doc__.replace('    ', '')

    @classmethod
    def short_description(cls):
        """Return the first sentence of the command's docstring."""
        return cls.description().split('.')[0]

    def __init__(self, logger):
        usage = '%%prog %s %s\n\n%s' % (self.name, self.arguments,
                                        self.description())
        Configuration.__init__(self, usage=usage)
        self.logger = logger

    def check_args(self, args):
        """check command's arguments are provided"""
        if self.min_args is not None and len(args) < self.min_args:
            raise BadCommandUsage('missing argument')
        if self.max_args is not None and len(args) > self.max_args:
            raise BadCommandUsage('too many arguments')

    def main_run(self, args, rcfile=None):
        """Run the command and return status 0 if everything went fine.

        If :exc:`CommandError` is raised by the underlying command, simply log
        the error and return status 2.

        Any other exceptions, including :exc:`BadCommandUsage` will be
        propagated.
        """
        if rcfile:
            self.load_file_configuration(rcfile)
        args = self.load_command_line_configuration(args)
        try:
            self.check_args(args)
            self.run(args)
        except CommandError as err:
            self.logger.error(err)
            return 2
        return 0

    def run(self, args):
        """run the command with its specific arguments"""
        raise NotImplementedError()
|
||||
|
||||
|
||||
class ListCommandsCommand(Command):
    """list available commands, useful for bash completion."""
    name = 'listcommands'
    arguments = '[command]'
    hidden = True

    def run(self, args):
        """run the command with its specific arguments"""
        if args:
            # with an argument: list the options of that command
            command = args.pop()
            cmd = _COMMANDS[command]
            # NOTE(review): '--help' is printed once per option; if the intent
            # was a single occurrence it should be hoisted before the loop --
            # verify against completion-script expectations
            for optname, optdict in cmd.options:
                print('--help')
                print('--' + optname)
        else:
            # no argument: list all visible command names
            commands = sorted(_COMMANDS.keys())
            for command in commands:
                cmd = _COMMANDS[command]
                if not cmd.hidden:
                    print(command)
|
||||
|
||||
|
||||
# deprecated stuff #############################################################

# module-level registry kept for backward compatibility with the
# function-based API below
_COMMANDS = CommandLine()

# default copyright notice used by legacy tools
DEFAULT_COPYRIGHT = '''\
Copyright (c) 2004-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
http://www.logilab.fr/ -- mailto:contact@logilab.fr'''

@deprecated('use cls.register(cli)')
def register_commands(commands):
    """register existing commands"""
    for command_klass in commands:
        _COMMANDS.register(command_klass)

@deprecated('use args.pop(0)')
def main_run(args, doc=None, copyright=None, version=None):
    """command line tool: run command specified by argument list (without the
    program name). Raise SystemExit with status 0 if everything went fine.

    >>> main_run(sys.argv[1:])
    """
    _COMMANDS.doc = doc
    _COMMANDS.copyright = copyright
    _COMMANDS.version = version
    _COMMANDS.run(args)

@deprecated('use args.pop(0)')
def pop_arg(args_list, expected_size_after=None, msg="Missing argument"):
    """helper function to get and check command line arguments"""
    try:
        value = args_list.pop(0)
    except IndexError:
        raise BadCommandUsage(msg)
    # optionally enforce how many arguments must remain after the pop
    if expected_size_after is not None and len(args_list) > expected_size_after:
        raise BadCommandUsage('too many arguments')
    return value
|
||||
|
||||
|
|
@ -1,78 +0,0 @@
|
|||
# pylint: disable=E0601,W0622,W0611
|
||||
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Wrappers around some builtins introduced in python 2.3, 2.4 and
|
||||
2.5, making them available in for earlier versions of python.
|
||||
|
||||
See another compatibility snippets from other projects:
|
||||
|
||||
:mod:`lib2to3.fixes`
|
||||
:mod:`coverage.backward`
|
||||
:mod:`unittest2.compatibility`
|
||||
"""
|
||||
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import os
|
||||
import sys
|
||||
import types
|
||||
from warnings import warn
|
||||
|
||||
# not used here, but imported to preserve API
|
||||
from six.moves import builtins
|
||||
|
||||
if sys.version_info < (3, 0):
    # on py2 ``str`` already is a byte string
    str_to_bytes = str
    def str_encode(string, encoding):
        # encode unicode with the requested codec, pass byte strings through
        if isinstance(string, unicode):
            return string.encode(encoding)
        return str(string)
else:
    def str_to_bytes(string):
        return str.encode(string)
    # we have to ignore the encoding in py3k to be able to write a string into a
    # TextIOWrapper or like object (which expect an unicode string)
    def str_encode(string, encoding):
        return str(string)

# See also http://bugs.python.org/issue11776
if sys.version_info[0] == 3:
    def method_type(callable, instance, klass):
        # api change. klass is no more considered
        return types.MethodType(callable, instance)
else:
    # alias types otherwise
    method_type = types.MethodType

# Pythons 2 and 3 differ on where to get StringIO
if sys.version_info < (3, 0):
    from cStringIO import StringIO
    FileIO = file
    BytesIO = StringIO
    reload = reload
else:
    from io import FileIO, BytesIO, StringIO
    from imp import reload

from logilab.common.deprecation import deprecated

# Other projects import these from here, keep providing them for
# backwards compat
any = deprecated('use builtin "any"')(any)
all = deprecated('use builtin "all"')(all)
|
||||
File diff suppressed because it is too large
Load diff
|
|
@ -1,101 +0,0 @@
|
|||
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""A daemonize function (for Unices)"""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import os
|
||||
import errno
|
||||
import signal
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
|
||||
from six.moves import range
|
||||
|
||||
def setugid(user):
    """Change process user and group ID

    Argument is a numeric user id or a user name"""
    try:
        from pwd import getpwuid
        passwd = getpwuid(int(user))
    except ValueError:
        # not numeric: resolve the user by name instead
        from pwd import getpwnam
        passwd = getpwnam(user)

    if hasattr(os, 'initgroups'): # python >= 2.7
        os.initgroups(passwd.pw_name, passwd.pw_gid)
    else:
        # fall back to the C library call on older pythons
        import ctypes
        if ctypes.CDLL(None).initgroups(passwd.pw_name, passwd.pw_gid) < 0:
            err = ctypes.c_int.in_dll(ctypes.pythonapi,"errno").value
            raise OSError(err, os.strerror(err), 'initgroups')
    # drop group id before user id -- the reverse order would already have
    # lost the permission to change groups
    os.setgid(passwd.pw_gid)
    os.setuid(passwd.pw_uid)
    os.environ['HOME'] = passwd.pw_dir
|
||||
|
||||
|
||||
def daemonize(pidfile=None, uid=None, umask=0o77):
    """daemonize a Unix process. Set paranoid umask by default.

    Return 1 in the original process, 2 in the first fork, and None for the
    second fork (eg daemon process).

    :param pidfile: optional path where the daemon pid is written
    :param uid: optional numeric id or user name to switch to at the end
    :param umask: file creation mask for the daemon, None to keep current
    """
    # http://www.faqs.org/faqs/unix-faq/programmer/faq/
    #
    # fork so the parent can exit
    if os.fork():   # launch child and...
        return 1
    # disconnect from tty and create a new session
    os.setsid()
    # fork again so the parent, (the session group leader), can exit.
    # as a non-session group leader, we can never regain a controlling
    # terminal.
    if os.fork():   # launch child again.
        return 2
    # move to the root to avoid mount point problems
    os.chdir('/')
    # redirect standard descriptors to /dev/null
    null = os.open('/dev/null', os.O_RDWR)
    for i in range(3):
        try:
            os.dup2(null, i)
        except OSError as e:
            if e.errno != errno.EBADF:
                raise
    os.close(null)
    # filter warnings
    warnings.filterwarnings('ignore')
    # write pid in a file
    if pidfile:
        # ensure the directory where the pid-file should be set exists (for
        # instance /var/run/cubicweb may be deleted on computer restart)
        piddir = os.path.dirname(pidfile)
        if not os.path.exists(piddir):
            os.makedirs(piddir)
        # BUGFIX: ``file()`` is a py2-only builtin; use open() and close the
        # handle deterministically via a context manager
        with open(pidfile, 'w') as f:
            f.write(str(os.getpid()))
    # set umask if specified
    if umask is not None:
        os.umask(umask)
    # change process uid
    if uid:
        setugid(uid)
    return None
|
||||
|
|
@ -1,335 +0,0 @@
|
|||
# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Date manipulation helper functions."""
|
||||
from __future__ import division
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import math
|
||||
import re
|
||||
import sys
|
||||
from locale import getlocale, LC_TIME
|
||||
from datetime import date, time, datetime, timedelta
|
||||
from time import strptime as time_strptime
|
||||
from calendar import monthrange, timegm
|
||||
|
||||
from six.moves import range
|
||||
|
||||
try:
|
||||
from mx.DateTime import RelativeDateTime, Date, DateTimeType
|
||||
except ImportError:
|
||||
endOfMonth = None
|
||||
DateTimeType = datetime
|
||||
else:
|
||||
endOfMonth = RelativeDateTime(months=1, day=-1)
|
||||
|
||||
# NOTE: should we implement a compatibility layer between date representations
|
||||
# as we have in lgc.db ?
|
||||
|
||||
# holidays falling on the same date every year; the '%s' placeholder
# receives the year (see get_national_holidays)
FRENCH_FIXED_HOLIDAYS = {
    'jour_an': '%s-01-01',
    'fete_travail': '%s-05-01',
    'armistice1945': '%s-05-08',
    'fete_nat': '%s-07-14',
    'assomption': '%s-08-15',
    'toussaint': '%s-11-01',
    'armistice1918': '%s-11-11',
    'noel': '%s-12-25',
    }

# holidays whose date changes every year, hard-coded per year
# (must be extended manually for years beyond 2012)
FRENCH_MOBILE_HOLIDAYS = {
    'paques2004': '2004-04-12',
    'ascension2004': '2004-05-20',
    'pentecote2004': '2004-05-31',

    'paques2005': '2005-03-28',
    'ascension2005': '2005-05-05',
    'pentecote2005': '2005-05-16',

    'paques2006': '2006-04-17',
    'ascension2006': '2006-05-25',
    'pentecote2006': '2006-06-05',

    'paques2007': '2007-04-09',
    'ascension2007': '2007-05-17',
    'pentecote2007': '2007-05-28',

    'paques2008': '2008-03-24',
    'ascension2008': '2008-05-01',
    'pentecote2008': '2008-05-12',

    'paques2009': '2009-04-13',
    'ascension2009': '2009-05-21',
    'pentecote2009': '2009-06-01',

    'paques2010': '2010-04-05',
    'ascension2010': '2010-05-13',
    'pentecote2010': '2010-05-24',

    'paques2011': '2011-04-25',
    'ascension2011': '2011-06-02',
    'pentecote2011': '2011-06-13',

    'paques2012': '2012-04-09',
    'ascension2012': '2012-05-17',
    'pentecote2012': '2012-05-28',
    }
|
||||
|
||||
# XXX this implementation cries for multimethod dispatching

def get_step(dateobj, nbdays=1):
    """Return an increment of *nbdays* days suited to *dateobj*'s type."""
    # assume date is either a python datetime or a mx.DateTime object
    if isinstance(dateobj, date):
        return ONEDAY * nbdays
    return nbdays # mx.DateTime is ok with integers

def datefactory(year, month, day, sampledate):
    """Build a date object of the same type as *sampledate*."""
    # assume date is either a python datetime or a mx.DateTime object
    if isinstance(sampledate, datetime):
        return datetime(year, month, day)
    if isinstance(sampledate, date):
        return date(year, month, day)
    return Date(year, month, day)

def weekday(dateobj):
    """Return the week day of *dateobj* as an int (0 = Monday)."""
    # assume date is either a python datetime or a mx.DateTime object
    if isinstance(dateobj, date):
        return dateobj.weekday()
    return dateobj.day_of_week

def str2date(datestr, sampledate):
    """Parse an ISO 'YYYY-MM-DD' string into *sampledate*'s date type."""
    # NOTE: datetime.strptime is not an option until we drop py2.4 compat
    year, month, day = [int(chunk) for chunk in datestr.split('-')]
    return datefactory(year, month, day, sampledate)
|
||||
|
||||
def days_between(start, end):
    """Return the number of whole days between *start* and *end*.

    A trailing partial day (non-zero seconds in the difference) counts as
    one more day.  Non-stdlib date objects (mx.DateTime) expose a float
    ``days`` attribute which is rounded up instead.
    """
    if not isinstance(start, date):
        # mx.DateTime difference: ``days`` is a float
        return int(math.ceil((end - start).days))
    span = end - start
    # timedelta.days is floored; bump it when a fraction of a day remains
    return span.days + (1 if span.seconds else 0)
|
||||
|
||||
def get_national_holidays(begin, end):
    """return french national days off between begin and end

    *begin* is inclusive, *end* exclusive; results use *begin*'s date type.
    """
    begin = datefactory(begin.year, begin.month, begin.day, begin)
    end = datefactory(end.year, end.month, end.day, end)
    # mobile holidays are hard-coded per-year entries
    holidays = [str2date(datestr, begin)
                for datestr in FRENCH_MOBILE_HOLIDAYS.values()]
    # fixed holidays: instantiate the '%s-mm-dd' template for every year
    for year in range(begin.year, end.year+1):
        for datestr in FRENCH_FIXED_HOLIDAYS.values():
            date = str2date(datestr % year, begin)
            if date not in holidays:
                holidays.append(date)
    return [day for day in holidays if begin <= day < end]
|
||||
|
||||
def add_days_worked(start, days):
    """adds date but try to only take days worked into account

    i.e. week-ends and french national holidays are skipped.
    """
    step = get_step(start)
    weeks, plus = divmod(days, 5)   # 5 worked days per calendar week
    end = start + ((weeks * 7) + plus) * step
    if weekday(end) >= 5: # saturday or sunday
        end += (2 * step)
    # push the end date past holidays that fall on a worked day
    end += len([x for x in get_national_holidays(start, end + step)
                if weekday(x) < 5]) * step
    if weekday(end) >= 5: # saturday or sunday
        end += (2 * step)
    return end

def nb_open_days(start, end):
    """Return the number of worked ("open") days between *start* and *end*,
    excluding week-ends and french national holidays (never negative).
    """
    assert start <= end
    step = get_step(start)
    days = days_between(start, end)
    weeks, plus = divmod(days, 7)
    if weekday(start) > weekday(end):
        # the leftover span wraps over a full week-end
        plus -= 2
    elif weekday(end) == 6:
        plus -= 1
    open_days = weeks * 5 + plus
    nb_week_holidays = len([x for x in get_national_holidays(start, end+step)
                            if weekday(x) < 5 and x < end])
    open_days -= nb_week_holidays
    if open_days < 0:
        return 0
    return open_days
|
||||
|
||||
def date_range(begin, end, incday=None, incmonth=None):
    """yields each date between begin and end

    :param begin: the start date (inclusive)
    :param end: the end date (exclusive)
    :param incday: the step, in days, used to iterate over dates.
                   Default is one day.
    :param incmonth: if set, iterate by that many months instead of days
                     (mutually exclusive with incday)

    When using mx datetime, you should *NOT* use incmonth argument, use instead
    oneDay, oneHour, oneMinute, oneSecond, oneWeek or endOfMonth (to enumerate
    months) as `incday` argument
    """
    assert not (incday and incmonth)
    begin = todate(begin)
    end = todate(end)
    if incmonth:
        while begin < end:
            yield begin
            begin = next_month(begin, incmonth)
    else:
        incr = get_step(begin, incday or 1)
        while begin < end:
            yield begin
            begin += incr
|
||||
|
||||
# makes py datetime usable #####################################################

# convenient timedelta constants
ONEDAY = timedelta(days=1)
ONEWEEK = timedelta(days=7)

try:
    strptime = datetime.strptime
except AttributeError: # py < 2.5
    from time import strptime as time_strptime
    def strptime(value, format):
        # build a datetime out of the (year..second) fields of the struct_time
        return datetime(*time_strptime(value, format)[:6])

def strptime_time(value, format='%H:%M'):
    """Parse *value* with *format* and return a ``datetime.time``."""
    # struct_time fields 3:6 are hour, minute, second
    return time(*time_strptime(value, format)[3:6])
|
||||
|
||||
def todate(somedate):
    """return a date from a date (leaving unchanged) or a datetime"""
    if isinstance(somedate, datetime):
        return date(somedate.year, somedate.month, somedate.day)
    assert isinstance(somedate, (date, DateTimeType)), repr(somedate)
    return somedate

def totime(somedate):
    """return a time from a time (leaving unchanged), date or datetime"""
    # XXX mx compat
    if not isinstance(somedate, time):
        return time(somedate.hour, somedate.minute, somedate.second)
    # NOTE(review): this assert is always true at this point (the opposite
    # was just tested); kept for symmetry with todate/todatetime
    assert isinstance(somedate, (time)), repr(somedate)
    return somedate

def todatetime(somedate):
    """return a datetime from a datetime (leaving unchanged) or a date"""
    # take care, datetime is a subclass of date
    if isinstance(somedate, datetime):
        return somedate
    assert isinstance(somedate, (date, DateTimeType)), repr(somedate)
    return datetime(somedate.year, somedate.month, somedate.day)
|
||||
|
||||
def datetime2ticks(somedate):
    """Return *somedate* as milliseconds since the Unix epoch.

    The timetuple is interpreted as UTC; sub-second precision is lost.
    """
    epoch_seconds = timegm(somedate.timetuple())
    return 1000 * epoch_seconds
|
||||
|
||||
def ticks2datetime(ticks):
    """Convert *ticks* (milliseconds since the Unix epoch) to a local
    ``datetime``.

    Falls back to explicit timedelta arithmetic from the epoch when the
    platform's ``fromtimestamp`` rejects the value (pre-1970 or far-future
    timestamps on some OSes).
    """
    # BUGFIX: the previous code named these ``miliseconds, microseconds``
    # although divmod by 1000 yields (seconds, milliseconds), and then fed
    # the remainder to timedelta as *microseconds* -- a 1000x error in the
    # fallback path
    seconds, milliseconds = divmod(ticks, 1000)
    try:
        return datetime.fromtimestamp(seconds)
    except (ValueError, OverflowError):
        epoch = datetime.fromtimestamp(0)
        nb_days, remaining_seconds = divmod(int(seconds), 86400)
        delta = timedelta(nb_days, seconds=remaining_seconds,
                          milliseconds=milliseconds)
        return epoch + delta
|
||||
|
||||
def days_in_month(somedate):
    """Return how many days the month of *somedate* has."""
    _, nb_days = monthrange(somedate.year, somedate.month)
    return nb_days


def days_in_year(somedate):
    """Return 366 for leap years, 365 otherwise."""
    february = date(somedate.year, 2, 1)
    return 366 if days_in_month(february) == 29 else 365
|
||||
|
||||
def previous_month(somedate, nbmonth=1):
    """Go back *nbmonth* months, stepping each time to the last day of the
    preceding month."""
    while nbmonth:
        # first day of current month minus one day = last day of previous month
        somedate = first_day(somedate) - ONEDAY
        nbmonth -= 1
    return somedate

def next_month(somedate, nbmonth=1):
    """Go forward *nbmonth* months, stepping each time to the first day of
    the following month."""
    while nbmonth:
        # last day of current month plus one day = first day of next month
        somedate = last_day(somedate) + ONEDAY
        nbmonth -= 1
    return somedate

def first_day(somedate):
    """Return the first day of *somedate*'s month."""
    return date(somedate.year, somedate.month, 1)

def last_day(somedate):
    """Return the last day of *somedate*'s month."""
    return date(somedate.year, somedate.month, days_in_month(somedate))
|
||||
|
||||
def ustrftime(somedate, fmt='%Y-%m-%d'):
    """like strftime, but returns a unicode string instead of an encoded
    string which may be problematic with localized date.
    """
    if sys.version_info >= (3, 3):
        # datetime.date.strftime() supports dates since year 1 in Python >=3.3.
        return somedate.strftime(fmt)
    else:
        try:
            if sys.version_info < (3, 0):
                # decode the byte string using the locale's time encoding
                encoding = getlocale(LC_TIME)[1] or 'ascii'
                return unicode(somedate.strftime(str(fmt)), encoding)
            else:
                return somedate.strftime(fmt)
        except ValueError:
            if somedate.year >= 1900:
                raise
            # datetime is not happy with dates before 1900
            # we try to work around this, assuming a simple
            # format string
            fields = {'Y': somedate.year,
                      'm': somedate.month,
                      'd': somedate.day,
                      }
            if isinstance(somedate, datetime):
                fields.update({'H': somedate.hour,
                               'M': somedate.minute,
                               'S': somedate.second})
            # rewrite e.g. '%Y' into '%(Y)02d' so the dict above can be used
            fmt = re.sub('%([YmdHMS])', r'%(\1)02d', fmt)
            return unicode(fmt) % fields
|
||||
|
||||
def utcdatetime(dt):
    """Return the naive UTC equivalent of *dt*.

    A naive *dt* is returned unchanged (assumed to already be UTC).
    """
    if dt.tzinfo is None:
        return dt
    return dt.replace(tzinfo=None) - dt.utcoffset()


def utctime(dt):
    """Shift an aware *dt* by its UTC offset and DST and strip tzinfo.

    A naive *dt* is returned unchanged.
    """
    if dt.tzinfo is None:
        return dt
    shifted = dt + dt.utcoffset() + dt.dst()
    return shifted.replace(tzinfo=None)
|
||||
|
||||
def datetime_to_seconds(date):
    """Return the number of seconds elapsed since midnight for `date`."""
    hours_part = 3600 * date.hour
    minutes_part = 60 * date.minute
    return date.second + minutes_part + hours_part
def timedelta_to_days(delta):
    """Return `delta` as a (possibly fractional) number of days.

    Microseconds are ignored.  (The previous docstring wrongly said
    "number of seconds" — it had been swapped with timedelta_to_seconds'.)
    """
    return delta.days + delta.seconds / (3600 * 24)
def timedelta_to_seconds(delta):
    """Return `delta` as a number of seconds.

    Microseconds are ignored.  (The previous docstring wrongly said
    "fraction of days" — it had been swapped with timedelta_to_days'.)
    """
    return delta.days * (3600 * 24) + delta.seconds
|
@ -1,214 +0,0 @@
|
|||
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Customized version of pdb's default debugger.
|
||||
|
||||
- sets up a history file
|
||||
- uses ipython if available to colorize lines of code
|
||||
- overrides list command to search for current block instead
|
||||
of using 5 lines of context
|
||||
|
||||
|
||||
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
try:
|
||||
import readline
|
||||
except ImportError:
|
||||
readline = None
|
||||
import os
|
||||
import os.path as osp
|
||||
import sys
|
||||
from pdb import Pdb
|
||||
import inspect
|
||||
|
||||
from logilab.common.compat import StringIO
|
||||
|
||||
try:
    from IPython import PyColorize
except ImportError:
    # IPython unavailable: provide no-op fallbacks with the same signatures.
    def colorize(source, *args):
        """fallback colorize function"""
        return source
    def colorize_source(source, *args):
        return source
else:
    def colorize(source, start_lineno, curlineno):
        """colorize and annotate source with linenos
        (as in pdb's list command)
        """
        parser = PyColorize.Parser()
        output = StringIO()
        parser.format(source, output)
        annotated = []
        for index, line in enumerate(output.getvalue().splitlines()):
            lineno = index + start_lineno
            # mark the current line with an arrow, as pdb's list does
            if lineno == curlineno:
                annotated.append('%4s\t->\t%s' % (lineno, line))
            else:
                annotated.append('%4s\t\t%s' % (lineno, line))
        return '\n'.join(annotated)

    def colorize_source(source):
        """colorize given source"""
        parser = PyColorize.Parser()
        output = StringIO()
        parser.format(source, output)
        return output.getvalue()
def getsource(obj):
    """Return the source text of an object together with its start line.

    `obj` may be a module, class, method, function, traceback, frame or
    code object.  Returns a ``(source_text, first_line_number)`` tuple;
    an IOError is raised when the source cannot be retrieved.
    """
    source_lines, first_line = inspect.getsourcelines(obj)
    return ''.join(source_lines), first_line
################################################################
|
||||
class Debugger(Pdb):
    """custom debugger

    - sets up a history file
    - uses ipython if available to colorize lines of code
    - overrides list command to search for current block instead
      of using 5 lines of context
    """
    def __init__(self, tcbk=None):
        Pdb.__init__(self)
        self.reset()
        if tcbk:
            # walk down to the innermost frame of the traceback
            while tcbk.tb_next is not None:
                tcbk = tcbk.tb_next
        self._tcbk = tcbk
        self._histfile = os.path.expanduser("~/.pdbhist")

    def setup_history_file(self):
        """if readline is available, read pdb history file
        """
        if readline is not None:
            try:
                # XXX try..except shouldn't be necessary
                # read_history_file() can accept None
                readline.read_history_file(self._histfile)
            except IOError:
                # no history file yet: start with an empty history
                pass

    def start(self):
        """starts the interactive mode"""
        self.interaction(self._tcbk.tb_frame, self._tcbk)

    def setup(self, frame, tcbk):
        """setup hook: set up history file"""
        self.setup_history_file()
        Pdb.setup(self, frame, tcbk)

    def set_quit(self):
        """quit hook: save commands in the history file"""
        if readline is not None:
            readline.write_history_file(self._histfile)
        Pdb.set_quit(self)

    def complete_p(self, text, line, begin_idx, end_idx):
        """provide variable names completion for the ``p`` command"""
        # complete against globals overridden by locals of the current frame
        namespace = dict(self.curframe.f_globals)
        namespace.update(self.curframe.f_locals)
        if '.' in text:
            return self.attr_matches(text, namespace)
        return [varname for varname in namespace if varname.startswith(text)]

    def attr_matches(self, text, namespace):
        """implementation coming from rlcompleter.Completer.attr_matches
        Compute matches when text contains a dot.

        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluatable in self.namespace, it will be evaluated and its attributes
        (as revealed by dir()) are used as possible completions. (For class
        instances, class members are also considered.)

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.

        """
        import re
        m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text)
        if not m:
            return
        expr, attr = m.group(1, 3)
        object = eval(expr, namespace)
        words = dir(object)
        if hasattr(object, '__class__'):
            words.append('__class__')
            words = words + self.get_class_members(object.__class__)
        matches = []
        n = len(attr)
        for word in words:
            if word[:n] == attr and word != "__builtins__":
                matches.append("%s.%s" % (expr, word))
        return matches

    def get_class_members(self, klass):
        """implementation coming from rlcompleter.get_class_members"""
        # recursively collect attributes from the class and all its bases
        ret = dir(klass)
        if hasattr(klass, '__bases__'):
            for base in klass.__bases__:
                ret = ret + self.get_class_members(base)
        return ret

    ## specific / overridden commands
    def do_list(self, arg):
        """overrides default list command to display the surrounding block
        instead of 5 lines of context
        """
        self.lastcmd = 'list'
        if not arg:
            try:
                source, start_lineno = getsource(self.curframe)
                print(colorize(''.join(source), start_lineno,
                               self.curframe.f_lineno))
            except KeyboardInterrupt:
                pass
            except IOError:
                # source unavailable: fall back to pdb's plain listing
                Pdb.do_list(self, arg)
        else:
            Pdb.do_list(self, arg)
    do_l = do_list

    def do_open(self, arg):
        """opens source file corresponding to the current stack level"""
        filename = self.curframe.f_code.co_filename
        lineno = self.curframe.f_lineno
        # NOTE(review): hard-codes emacsclient as the editor
        cmd = 'emacsclient --no-wait +%s %s' % (lineno, filename)
        os.system(cmd)

    do_o = do_open
def pm():
    """use our custom debugger"""
    # post-mortem debugging on the most recent traceback
    dbg = Debugger(sys.last_traceback)
    dbg.start()
def set_trace():
    # break into the custom debugger at the caller's frame
    Debugger().set_trace(sys._getframe().f_back)
|
|
@ -1,281 +0,0 @@
|
|||
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
""" A few useful function/method decorators. """
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
import types
|
||||
from time import clock, time
|
||||
from inspect import isgeneratorfunction, getargspec
|
||||
|
||||
from logilab.common.compat import method_type
|
||||
|
||||
# XXX rewrite so we can use the decorator syntax when keyarg has to be specified
|
||||
|
||||
class cached_decorator(object):
    # Factory selecting the concrete cache implementation used by `cached`,
    # based on the decorated callable's arity and the `keyarg` option.
    def __init__(self, cacheattr=None, keyarg=None):
        self.cacheattr = cacheattr
        self.keyarg = keyarg
    def __call__(self, callableobj=None):
        assert not isgeneratorfunction(callableobj), \
            'cannot cache generator function: %s' % callableobj
        if len(getargspec(callableobj).args) == 1 or self.keyarg == 0:
            # method takes only `self` (or caller forced keyarg=0):
            # a single cached value per instance is enough
            cache = _SingleValueCache(callableobj, self.cacheattr)
        elif self.keyarg:
            # cache keyed on one designated positional argument
            cache = _MultiValuesKeyArgCache(callableobj, self.keyarg, self.cacheattr)
        else:
            # cache keyed on the full positional-argument tuple
            cache = _MultiValuesCache(callableobj, self.cacheattr)
        return cache.closure()
class _SingleValueCache(object):
    # Cache holding a single value, stored as an attribute on the holder
    # instance (for methods whose only argument is `self`).
    def __init__(self, callableobj, cacheattr=None):
        self.callable = callableobj
        if cacheattr is None:
            self.cacheattr = '_%s_cache_' % callableobj.__name__
        else:
            # the cache attribute must not shadow the method itself
            assert cacheattr != callableobj.__name__
            self.cacheattr = cacheattr

    def __call__(__me, self, *args):
        # NOTE: unusual convention — `__me` is the cache object, `self` is
        # the instance holding the decorated method.
        try:
            return self.__dict__[__me.cacheattr]
        except KeyError:
            # not cached yet: compute once and store on the instance
            value = __me.callable(self, *args)
            setattr(self, __me.cacheattr, value)
            return value

    def closure(self):
        # Return the function actually installed in place of the method;
        # it exposes the cache object via the `cache_obj` attribute
        # (used by get_cache_impl/clear_cache/copy_cache).
        def wrapped(*args, **kwargs):
            return self.__call__(*args, **kwargs)
        wrapped.cache_obj = self
        try:
            wrapped.__doc__ = self.callable.__doc__
            wrapped.__name__ = self.callable.__name__
        except:
            pass
        return wrapped

    def clear(self, holder):
        # drop the cached value from `holder`, if any
        holder.__dict__.pop(self.cacheattr, None)
class _MultiValuesCache(_SingleValueCache):
    # Cache keyed on the full positional-argument tuple.
    def _get_cache(self, holder):
        # lazily create the per-instance cache dict on first use
        try:
            _cache = holder.__dict__[self.cacheattr]
        except KeyError:
            _cache = {}
            setattr(holder, self.cacheattr, _cache)
        return _cache

    def __call__(__me, self, *args, **kwargs):
        # NOTE: `__me` is the cache object, `self` the holder instance
        _cache = __me._get_cache(self)
        try:
            return _cache[args]
        except KeyError:
            _cache[args] = __me.callable(self, *args)
            return _cache[args]
class _MultiValuesKeyArgCache(_MultiValuesCache):
    # Cache keyed on a single designated positional argument.
    def __init__(self, callableobj, keyarg, cacheattr=None):
        super(_MultiValuesKeyArgCache, self).__init__(callableobj, cacheattr)
        # 1-based index (not counting `self`) of the argument used as key
        self.keyarg = keyarg

    def __call__(__me, self, *args, **kwargs):
        _cache = __me._get_cache(self)
        # keyarg is 1-based relative to the method signature after `self`
        key = args[__me.keyarg-1]
        try:
            return _cache[key]
        except KeyError:
            _cache[key] = __me.callable(self, *args, **kwargs)
            return _cache[key]
def cached(callableobj=None, keyarg=None, **kwargs):
    """Simple decorator to cache result of method call."""
    kwargs['keyarg'] = keyarg
    decorator = cached_decorator(**kwargs)
    if callableobj is None:
        # used with arguments, e.g. @cached(keyarg=1): return the decorator
        return decorator
    else:
        # used as plain @cached: decorate immediately
        return decorator(callableobj)
class cachedproperty(object):
    """ Provides a cached property equivalent to the stacking of
    @cached and @property, but more efficient.

    After first usage, the <property_name> becomes part of the object's
    __dict__. Doing:

        del obj.<property_name> empties the cache.

    Idea taken from the pyramid_ framework and the mercurial_ project.

    .. _pyramid: http://pypi.python.org/pypi/pyramid
    .. _mercurial: http://pypi.python.org/pypi/Mercurial
    """
    __slots__ = ('wrapped',)

    def __init__(self, wrapped):
        # the wrapped callable must have a name: the computed value is
        # stored on the instance under that very name
        if not hasattr(wrapped, '__name__'):
            raise TypeError('%s must have a __name__ attribute' %
                            wrapped)
        self.wrapped = wrapped

    @property
    def __doc__(self):
        inner_doc = getattr(self.wrapped, '__doc__', None)
        suffix = '\n%s' % inner_doc if inner_doc else ''
        return '<wrapped by the cachedproperty decorator>%s' % suffix

    def __get__(self, inst, objtype=None):
        if inst is None:
            # class-level access returns the descriptor itself
            return self
        value = self.wrapped(inst)
        # shadow the descriptor with the instance attribute: subsequent
        # accesses never reach __get__ again
        setattr(inst, self.wrapped.__name__, value)
        return value
def get_cache_impl(obj, funcname):
    # Retrieve the cache object that the `cached` decorator attached
    # (as `cache_obj`) to obj's method -- or property getter -- `funcname`.
    cls = obj.__class__
    member = getattr(cls, funcname)
    if isinstance(member, property):
        member = member.fget
    return member.cache_obj
def clear_cache(obj, funcname):
    """Clear a cache handled by the :func:`cached` decorator. If 'x' class has
    @cached on its method `foo`, type

    >>> clear_cache(x, 'foo')

    to purge this method's cache on the instance.
    """
    get_cache_impl(obj, funcname).clear(obj)
def copy_cache(obj, funcname, cacheobj):
    """Copy cache for <funcname> from cacheobj to obj."""
    cacheattr = get_cache_impl(obj, funcname).cacheattr
    try:
        setattr(obj, cacheattr, cacheobj.__dict__[cacheattr])
    except KeyError:
        # nothing cached yet on cacheobj: nothing to copy
        pass
class wproperty(object):
    """Write-oriented descriptor built from a setter function.

    Writes go through the wrapped setter; reads are served from the
    ``_<setter name>`` attribute on the instance.
    """
    def __init__(self, setfunc):
        self.setfunc = setfunc
        self.attrname = '_%s' % setfunc.__name__

    def __set__(self, obj, value):
        # delegate mutation to the wrapped setter
        self.setfunc(obj, value)

    def __get__(self, obj, cls):
        # class-level access is not supported
        assert obj is not None
        return getattr(obj, self.attrname)
class classproperty(object):
    """Read-only property computed on the class rather than on instances."""
    def __init__(self, get):
        self.get = get

    def __get__(self, inst, cls):
        # always call the getter with the class, even when accessed
        # through an instance
        return self.get(cls)
class iclassmethod(object):
    '''Descriptor for method which should be available as class method if called
    on the class or instance method if called on an instance.
    '''
    def __init__(self, func):
        self.func = func
    def __get__(self, instance, objtype):
        # bind to the class when accessed on the class,
        # to the instance otherwise
        if instance is None:
            return method_type(self.func, objtype, objtype.__class__)
        return method_type(self.func, instance, objtype)
    def __set__(self, instance, value):
        # read-only descriptor
        raise AttributeError("can't set attribute")
def timed(f):
    """Decorator printing, after each call, the CPU time ('clock') and
    wall-clock time ('time') spent in the wrapped callable.

    Uses time.process_time()/time.time() because time.clock was removed
    in Python 3.8.
    """
    def wrap(*args, **kwargs):
        import time as _time
        t = _time.time()
        c = _time.process_time()
        res = f(*args, **kwargs)
        print('%s clock: %.9f / time: %.9f' % (f.__name__,
                                               _time.process_time() - c,
                                               _time.time() - t))
        return res
    return wrap
def locked(acquire, release):
    """Build a method decorator bracketing each call with a lock.

    ``acquire(self)`` runs before the wrapped method and ``release(self)``
    runs afterwards, even when the method raises.
    """
    def make_wrapper(func):
        def guarded(self, *args, **kwargs):
            acquire(self)
            try:
                return func(self, *args, **kwargs)
            finally:
                release(self)
        return guarded
    return make_wrapper
def monkeypatch(klass, methodname=None):
    """Decorator attaching the decorated callable to `klass`.

    Syntactic sugar for ``klass.name = func``.  The attribute name
    defaults to the callable's ``__name__`` and may be overridden with
    `methodname`.

    >>> class A:
    ...     pass
    >>> @monkeypatch(A)
    ... def meth(self):
    ...     return 12
    >>> A().meth()
    12
    >>> @monkeypatch(A, 'foo')
    ... def meth(self):
    ...     return 12
    >>> A().foo()
    12
    """
    def attach(func):
        target_name = methodname
        if not target_name:
            try:
                target_name = func.__name__
            except AttributeError:
                raise AttributeError('%s has no __name__ attribute: '
                                     'you should provide an explicit `methodname`'
                                     % func)
        setattr(klass, target_name, func)
        return func
    return attach
|
@ -1,189 +0,0 @@
|
|||
# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Deprecation utilities."""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
from warnings import warn
|
||||
|
||||
from logilab.common.changelog import Version
|
||||
|
||||
|
||||
class DeprecationWrapper(object):
    """Attribute-access proxy emitting a DeprecationWarning every time the
    wrapped object is read or written through it.
    """
    def __init__(self, proxied, msg=None):
        self._proxied = proxied
        self._msg = msg

    def __getattr__(self, attr):
        warn(self._msg, DeprecationWarning, stacklevel=2)
        return getattr(self._proxied, attr)

    def __setattr__(self, attr, value):
        if attr not in ('_proxied', '_msg'):
            # everything else is forwarded to the proxied object, with a
            # warning
            warn(self._msg, DeprecationWarning, stacklevel=2)
            setattr(self._proxied, attr, value)
            return
        # the proxy's own two slots are stored directly
        self.__dict__[attr] = value
class DeprecationManager(object):
    """Manage the deprecation message handling. Messages are dropped for
    versions more recent than the 'compatible' version. Example::

        deprecator = deprecation.DeprecationManager("module_name")
        deprecator.compatibility('1.3')

        deprecator.warn('1.2', "message.")

        @deprecator.deprecated('1.2', 'Message')
        def any_func():
            pass

        class AnyClass(object):
            __metaclass__ = deprecator.class_deprecated('1.2')
    """
    def __init__(self, module_name=None):
        """
        """
        self.module_name = module_name
        # None until compatibility() is called; then warnings for versions
        # >= compatible_version are suppressed by warn()
        self.compatible_version = None

    def compatibility(self, compatible_version):
        """Set the compatible version.
        """
        self.compatible_version = Version(compatible_version)

    def deprecated(self, version=None, reason=None, stacklevel=2, name=None, doc=None):
        """Display a deprecation message only if the version is older than the
        compatible version.
        """
        def decorator(func):
            message = reason or 'The function "%s" is deprecated'
            if '%s' in message:
                # interpolate the function name into the message template
                message %= func.__name__
            def wrapped(*args, **kwargs):
                # +1 so the warning points at the wrapped function's caller
                self.warn(version, message, stacklevel+1)
                return func(*args, **kwargs)
            return wrapped
        return decorator

    def class_deprecated(self, version=None):
        class metaclass(type):
            """metaclass to print a warning on instantiation of a deprecated class"""

            def __call__(cls, *args, **kwargs):
                msg = getattr(cls, "__deprecation_warning__",
                              "%(cls)s is deprecated") % {'cls': cls.__name__}
                self.warn(version, msg, stacklevel=3)
                return type.__call__(cls, *args, **kwargs)
        return metaclass

    def moved(self, version, modpath, objname):
        """use to tell that a callable has been moved to a new module.

        It returns a callable wrapper, so that when its called a warning is printed
        telling where the object can be found, import is done (and not before) and
        the actual object is called.

        NOTE: the usage is somewhat limited on classes since it will fail if the
        wrapper is use in a class ancestors list, use the `class_moved` function
        instead (which has no lazy import feature though).
        """
        def callnew(*args, **kwargs):
            # import lazily, only when the moved object is actually called
            from logilab.common.modutils import load_module_from_name
            message = "object %s has been moved to module %s" % (objname, modpath)
            self.warn(version, message)
            m = load_module_from_name(modpath)
            return getattr(m, objname)(*args, **kwargs)
        return callnew

    def class_renamed(self, version, old_name, new_class, message=None):
        # Build a class named `old_name` subclassing `new_class` that warns
        # on instantiation.
        clsdict = {}
        if message is None:
            message = '%s is deprecated, use %s' % (old_name, new_class.__name__)
        clsdict['__deprecation_warning__'] = message
        try:
            # new-style class
            return self.class_deprecated(version)(old_name, (new_class,), clsdict)
        except (NameError, TypeError):
            # old-style class
            warn = self.warn
            class DeprecatedClass(new_class):
                """FIXME: There might be a better way to handle old/new-style class
                """
                def __init__(self, *args, **kwargs):
                    warn(version, message, stacklevel=3)
                    new_class.__init__(self, *args, **kwargs)
            return DeprecatedClass

    def class_moved(self, version, new_class, old_name=None, message=None):
        """nice wrapper around class_renamed when a class has been moved into
        another module
        """
        if old_name is None:
            old_name = new_class.__name__
        if message is None:
            message = 'class %s is now available as %s.%s' % (
                old_name, new_class.__module__, new_class.__name__)
        return self.class_renamed(version, old_name, new_class, message)

    def warn(self, version=None, reason="", stacklevel=2):
        """Display a deprecation message only if the version is older than the
        compatible version.
        """
        if (self.compatible_version is None
                or version is None
                or Version(version) < self.compatible_version):
            # prefix the message with module name and/or version when known
            if self.module_name and version:
                reason = '[%s %s] %s' % (self.module_name, version, reason)
            elif self.module_name:
                reason = '[%s] %s' % (self.module_name, reason)
            elif version:
                reason = '[%s] %s' % (version, reason)
            warn(reason, DeprecationWarning, stacklevel=stacklevel)
# Module-level shortcuts bound to a default, version-less manager, so this
# module can be used without instantiating DeprecationManager explicitly.
_defaultdeprecator = DeprecationManager()

def deprecated(reason=None, stacklevel=2, name=None, doc=None):
    # version-less variant of DeprecationManager.deprecated
    return _defaultdeprecator.deprecated(None, reason, stacklevel, name, doc)

class_deprecated = _defaultdeprecator.class_deprecated()

def moved(modpath, objname):
    return _defaultdeprecator.moved(None, modpath, objname)
moved.__doc__ = _defaultdeprecator.moved.__doc__

def class_renamed(old_name, new_class, message=None):
    """automatically creates a class which fires a DeprecationWarning
    when instantiated.

    >>> Set = class_renamed('Set', set, 'Set is now replaced by set')
    >>> s = Set()
    sample.py:57: DeprecationWarning: Set is now replaced by set
      s = Set()
    >>>
    """
    return _defaultdeprecator.class_renamed(None, old_name, new_class, message)

def class_moved(new_class, old_name=None, message=None):
    return _defaultdeprecator.class_moved(None, new_class, old_name, message)
class_moved.__doc__ = _defaultdeprecator.class_moved.__doc__
|
@ -1,404 +0,0 @@
|
|||
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""File and file-path manipulation utilities.
|
||||
|
||||
:group path manipulation: first_level_directory, relative_path, is_binary,\
|
||||
get_by_ext, remove_dead_links
|
||||
:group file manipulation: norm_read, norm_open, lines, stream_lines, lines,\
|
||||
write_open_mode, ensure_fs_mode, export
|
||||
:sort: path manipulation, file manipulation
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
import sys
|
||||
import shutil
|
||||
import mimetypes
|
||||
from os.path import isabs, isdir, islink, split, exists, normpath, join
|
||||
from os.path import abspath
|
||||
from os import sep, mkdir, remove, listdir, stat, chmod, walk
|
||||
from stat import ST_MODE, S_IWRITE
|
||||
|
||||
from logilab.common import STD_BLACKLIST as BASE_BLACKLIST, IGNORED_EXTENSIONS
|
||||
from logilab.common.shellutils import find
|
||||
from logilab.common.deprecation import deprecated
|
||||
from logilab.common.compat import FileIO
|
||||
|
||||
def first_level_directory(path):
    """Return the first path component of `path`.

    >>> first_level_directory('home/syt/work')
    'home'
    >>> first_level_directory('/home/syt/work')
    '/'
    >>> first_level_directory('work')
    'work'

    :type path: str
    :param path: the path for which we want the first level directory

    :rtype: str
    :return: the first level directory appearing in `path`
    """
    remaining, component = split(path)
    # peel components off the right until only the leading one is left
    while remaining and component:
        remaining, component = split(remaining)
    if component:
        return component
    # absolute path: the loop ran down to the filesystem root
    return remaining
def abspath_listdir(path):
    """List `path`'s content as absolute paths.

    >>> abspath_listdir('/home')
    ['/home/adim', '/home/alf', '/home/arthur', '/home/auc']
    """
    base = abspath(path)
    return [join(base, entry) for entry in listdir(base)]
def is_binary(filename):
    """Return True if `filename` looks like a binary file, judging from the
    mime type guessed from its extension.

    :type filename: str
    :param filename: the name of the file

    :rtype: bool
    :return: True unless the guessed mime type begins with 'text'
        (a file with an unknown extension is considered binary)
    """
    mime_type = mimetypes.guess_type(filename)[0]
    if mime_type is None:
        # no guess at all: play it safe and treat the file as binary
        # (the original reached this case via an AttributeError and
        # returned the int 1)
        return True
    return not mime_type.startswith('text')
def write_open_mode(filename):
    """Return the write mode suited to `filename`.

    :type filename: str
    :param filename: the name of the file

    :rtype: str
    :return: 'wb' for a (guessed) binary file, 'w' otherwise
    """
    return 'wb' if is_binary(filename) else 'w'
def ensure_fs_mode(filepath, desired_mode=S_IWRITE):
    """Make sure `filepath` carries the permission bits in `desired_mode`,
    chmod-ing it when some are missing.

    :type filepath: str
    :param filepath: path of the file

    :type desired_mode: int
    :param desired_mode:
        ORed flags describing the desired mode. Use constants from the
        `stat` module for file permission's modes
    """
    current_mode = stat(filepath)[ST_MODE]
    if current_mode & desired_mode:
        return
    chmod(filepath, current_mode | desired_mode)
# XXX (syt) unused? kill?
class ProtectedFile(FileIO):
    """A special file-object class that automatically does a 'chmod +w' when
    needed.

    XXX: for now, the way it is done allows 'normal file-objects' to be
    created during the ProtectedFile object lifetime.
    One way to circumvent this would be to chmod / unchmod on each
    write operation.

    One other way would be to :

    - catch the IOError in the __init__

    - if IOError, then create a StringIO object

    - each write operation writes in this StringIO object

    - on close()/del(), write/append the StringIO content to the file and
      do the chmod only once
    """
    def __init__(self, filepath, mode):
        # remember the pre-existing mode so it can be restored on close
        self.original_mode = stat(filepath)[ST_MODE]
        self.mode_changed = False
        if mode in ('w', 'a', 'wb', 'ab'):
            # writing requested: grant the write bit if it is missing
            if not self.original_mode & S_IWRITE:
                chmod(filepath, self.original_mode | S_IWRITE)
                self.mode_changed = True
        FileIO.__init__(self, filepath, mode)

    def _restore_mode(self):
        """restores the original mode if needed"""
        if self.mode_changed:
            chmod(self.name, self.original_mode)
            # Don't re-chmod in case of several restore
            self.mode_changed = False

    def close(self):
        """restore mode before closing"""
        self._restore_mode()
        FileIO.close(self)

    def __del__(self):
        if not self.closed:
            self.close()
class UnresolvableError(Exception):
    """Raised by relative-path computation when no relative path exists
    between the two given paths.
    """
||||
def relative_path(from_file, to_file):
    """Try to get a relative path from `from_file` to `to_file`.

    The result is absolute when `to_file` is absolute; it is typically
    used to create a link in `from_file` pointing at `to_file`.  When
    both paths are relative, they are assumed relative to the same
    directory.

    >>> relative_path(from_file='toto/index.html', to_file='index.html')
    '../index.html'
    >>> relative_path(from_file='/toto/index.html', to_file='/toto/summary.html')
    'summary.html'
    >>> relative_path(from_file='index.html', to_file='index.html')
    ''

    :type from_file: str
    :param from_file: source file (where links will be inserted)

    :type to_file: str
    :param to_file: target file (on which links point)

    :raise UnresolvableError: if it has been unable to guess a correct path
        (absolute source with relative target)

    :rtype: str
    :return: the relative path of `to_file` from `from_file`
    """
    src = normpath(from_file)
    dest = normpath(to_file)
    if src == dest:
        return ''
    if isabs(dest):
        # absolute target: usable as-is from a relative source
        if not isabs(src):
            return dest
    elif isabs(src):
        # relative target cannot be reached from an absolute source
        raise UnresolvableError()
    src_parts = src.split(sep)
    dest_parts = dest.split(sep)
    hops = []
    diverged = False
    # walk the source directories; shared leading components are consumed,
    # every remaining source directory costs one '..' hop
    while len(src_parts) > 1:
        segment = src_parts.pop(0)
        if not diverged and len(dest_parts) > 1 and segment == dest_parts[0]:
            dest_parts.pop(0)
        else:
            diverged = True
            hops.append('..')
    return sep.join(hops + dest_parts)
|
||||
|
||||
|
||||
def norm_read(path):
    """Return the content of the file with normalized line feeds.

    :type path: str
    :param path: path to the file to read

    :rtype: str
    :return: the content of the file with normalized line feeds
    """
    # The historical open(path, 'U') form raises ValueError on Python >= 3.11
    # ('U' mode was removed); default text mode already performs universal
    # newline translation.  Use a context manager so the handle is closed.
    with open(path) as stream:
        return stream.read()
|
||||
# Wrap the helper so calls warn callers off — `deprecated` is defined
# elsewhere in this file; presumably it emits a DeprecationWarning (verify).
norm_read = deprecated("use \"open(path, 'U').read()\"")(norm_read)
|
||||
|
||||
def norm_open(path):
    """Return a stream for a file with content with normalized line feeds.

    The caller is responsible for closing the returned stream.

    :type path: str
    :param path: path to the file to open

    :rtype: file
    :return: the opened file with normalized line feeds
    """
    # open(path, 'U') raises ValueError on Python >= 3.11 ('U' was removed);
    # default text mode already performs universal newline translation.
    return open(path)
|
||||
# Wrap the helper so calls warn callers off — `deprecated` is defined
# elsewhere in this file; presumably it emits a DeprecationWarning (verify).
norm_open = deprecated("use \"open(path, 'U')\"")(norm_open)
|
||||
|
||||
def lines(path, comments=None):
    """Return a list of non empty lines in the file located at `path`.

    :type path: str
    :param path: path to the file

    :type comments: str or None
    :param comments:
        optional string which can be used to comment a line in the file
        (i.e. lines starting with this string won't be returned)

    :rtype: list
    :return:
        a list of stripped line in the file, without empty and commented
        lines

    :warning: at some point this function will probably return an iterator
    """
    # open(path, 'U') raises ValueError on Python >= 3.11 ('U' was removed);
    # default text mode already normalizes newlines.  The `with` statement
    # also guarantees the handle is closed even if stream_lines raises
    # (the previous explicit close() leaked on error).
    with open(path) as stream:
        return stream_lines(stream, comments)
|
||||
|
||||
|
||||
def stream_lines(stream, comments=None):
    """Return a list of non empty lines in the given `stream`.

    :type stream: object implementing 'xreadlines' or 'readlines'
    :param stream: file like object

    :type comments: str or None
    :param comments:
        optional string which can be used to comment a line in the file
        (i.e. lines starting with this string won't be returned)

    :rtype: list
    :return:
        a list of stripped line in the file, without empty and commented
        lines

    :warning: at some point this function will probably return an iterator
    """
    # prefer the legacy xreadlines API when present, else readlines
    try:
        read_all = stream.xreadlines
    except AttributeError:
        read_all = stream.readlines
    stripped = (raw.strip() for raw in read_all())
    return [line for line in stripped
            if line and (comments is None or not line.startswith(comments))]
|
||||
|
||||
|
||||
def export(from_dir, to_dir,
           blacklist=BASE_BLACKLIST, ignore_ext=IGNORED_EXTENSIONS,
           verbose=0):
    """Make a mirror of `from_dir` in `to_dir`, omitting directories and
    files listed in the black list or ending with one of the given
    extensions.

    :type from_dir: str
    :param from_dir: directory to export

    :type to_dir: str
    :param to_dir: destination directory

    :type blacklist: list or tuple
    :param blacklist:
        list of files or directories to ignore, default to the content of
        `BASE_BLACKLIST`

    :type ignore_ext: list or tuple
    :param ignore_ext:
        list of extensions to ignore, default to the content of
        `IGNORED_EXTENSIONS`

    :type verbose: bool
    :param verbose:
        flag indicating whether information about exported files should be
        printed to stderr, default to False
    """
    try:
        mkdir(to_dir)
    except OSError:
        pass # FIXME we should use "exists" if the point is about existing dir
        # else (permission problems?) shouldn't return / raise ?
    for directory, dirnames, filenames in walk(from_dir):
        # prune blacklisted names in place so walk() does not descend into them
        for norecurs in blacklist:
            try:
                dirnames.remove(norecurs)
            except ValueError:
                continue
        # first replicate the directory layout under to_dir
        for dirname in dirnames:
            src = join(directory, dirname)
            # NOTE(review): this slice assumes from_dir has no trailing
            # separator, otherwise dest loses a character — TODO confirm
            dest = to_dir + src[len(from_dir):]
            if isdir(src):
                if not exists(dest):
                    mkdir(dest)
        # then copy the files, skipping ignored extensions
        for filename in filenames:
            # don't include binary files
            # endswith does not accept tuple in 2.4
            if any([filename.endswith(ext) for ext in ignore_ext]):
                continue
            src = join(directory, filename)
            dest = to_dir + src[len(from_dir):]
            if verbose:
                print(src, '->', dest, file=sys.stderr)
            # overwrite any stale copy before copy2 (preserves mtime/mode)
            if exists(dest):
                remove(dest)
            shutil.copy2(src, dest)
|
||||
|
||||
|
||||
def remove_dead_links(directory, verbose=0):
    """Recursively traverse directory and remove all dead links.

    :type directory: str
    :param directory: directory to cleanup

    :type verbose: bool
    :param verbose:
        flag indicating whether information about deleted links should be
        printed, default to False
    """
    # BUG FIX: the loop variable was `dirname` while the body read
    # `dirnames`, so the function raised NameError on the first iteration.
    for dirpath, dirnames, filenames in walk(directory):
        for filename in dirnames + filenames:
            src = join(dirpath, filename)
            # islink() true but exists() false: the symlink is present
            # while its target is gone -> dead link
            if islink(src) and not exists(src):
                if verbose:
                    # NOTE(review): docstring says stderr but this prints to
                    # stdout — kept as-is, confirm intended destination
                    print('remove dead link', src)
                remove(src)
|
||||
|
||||
|
|
@ -1,282 +0,0 @@
|
|||
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
|
||||
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
#
|
||||
# This file is part of logilab-common.
|
||||
#
|
||||
# logilab-common is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Lesser General Public License as published by the Free
|
||||
# Software Foundation, either version 2.1 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License along
|
||||
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""Graph manipulation utilities.
|
||||
|
||||
(dot generation adapted from pypy/translator/tool/make_dot.py)
|
||||
"""
|
||||
|
||||
__docformat__ = "restructuredtext en"
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import os.path as osp
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import codecs
|
||||
import errno
|
||||
|
||||
def escape(value):
    """Make <value> usable in a dot file."""
    # escape double quotes, then turn newlines into dot's left-justified
    # line breaks; the leading '\n' matches the original output format
    quoted = value.replace('"', '\\"')
    return '\\n' + quoted.replace('\n', '\\l')
|
||||
|
||||
def target_info_from_filename(filename):
|
||||
"""Transforms /some/path/foo.png into ('/some/path', 'foo.png', 'png')."""
|
||||
basename = osp.basename(filename)
|
||||
storedir = osp.dirname(osp.abspath(filename))
|
||||
target = filename.split('.')[-1]
|
||||
return storedir, basename, target
|
||||
|
||||
|
||||
class DotBackend:
    """Dot file backend: accumulates dot statements via emit*() and renders
    them with the graphviz `renderer` command in generate().
    """

    def __init__(self, graphname, rankdir=None, size=None, ratio=None,
                 charset='utf-8', renderer='dot', additionnal_param=None):
        """
        :param graphname: name of the digraph (also used for default filenames)
        :param rankdir: optional dot rankdir (e.g. 'LR')
        :param size: optional dot size attribute
        :param ratio: optional dot ratio attribute
        :param charset: output charset; must be utf-8 / iso-8859-1 / latin1
        :param renderer: graphviz executable used to render (default 'dot')
        :param additionnal_param: extra graph-level key/value dot attributes
        """
        # BUG FIX: the default was a mutable dict ({}); use None to avoid the
        # shared-mutable-default pitfall.  Behavior is unchanged for callers.
        if additionnal_param is None:
            additionnal_param = {}
        self.graphname = graphname
        self.renderer = renderer
        self.lines = []
        self._source = None
        self.emit("digraph %s {" % normalize_node_id(graphname))
        if rankdir:
            self.emit('rankdir=%s' % rankdir)
        if ratio:
            self.emit('ratio=%s' % ratio)
        if size:
            self.emit('size="%s"' % size)
        if charset:
            assert charset.lower() in ('utf-8', 'iso-8859-1', 'latin1'), \
                   'unsupported charset %s' % charset
            self.emit('charset="%s"' % charset)
        # sorted for deterministic output
        for param in sorted(additionnal_param.items()):
            self.emit('='.join(param))

    def get_source(self):
        """returns self._source, closing the digraph on first access"""
        if self._source is None:
            self.emit("}\n")
            self._source = '\n'.join(self.lines)
            # the graph is closed: no further emit() is possible
            del self.lines
        return self._source

    source = property(get_source)

    def generate(self, outputfile=None, dotfile=None, mapfile=None):
        """Generates a graph file.

        :param outputfile: filename and path [defaults to graphname.png]
        :param dotfile: filename and path [defaults to graphname.dot]
        :param mapfile: optional image-map output file (dot -Tcmapx)

        :rtype: str
        :return: a path to the generated file
        """
        import subprocess # introduced in py 2.4
        name = self.graphname
        if not dotfile:
            # if 'outputfile' is a dot file use it as 'dotfile'
            if outputfile and outputfile.endswith(".dot"):
                dotfile = outputfile
            else:
                dotfile = '%s.dot' % name
        if outputfile is not None:
            storedir, basename, target = target_info_from_filename(outputfile)
            if target != "dot":
                # non-dot target: render from a temporary dot source
                pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
                os.close(pdot)
            else:
                dot_sourcepath = osp.join(storedir, dotfile)
        else:
            # no output requested: default to a temporary png
            target = 'png'
            pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
            ppng, outputfile = tempfile.mkstemp(".png", name)
            os.close(pdot)
            os.close(ppng)
        pdot = codecs.open(dot_sourcepath, 'w', encoding='utf8')
        pdot.write(self.source)
        pdot.close()
        if target != 'dot':
            if sys.platform == 'win32':
                use_shell = True
            else:
                use_shell = False
            try:
                if mapfile:
                    subprocess.call([self.renderer, '-Tcmapx', '-o', mapfile, '-T', target, dot_sourcepath, '-o', outputfile],
                                    shell=use_shell)
                else:
                    subprocess.call([self.renderer, '-T', target,
                                     dot_sourcepath, '-o', outputfile],
                                    shell=use_shell)
            except OSError as e:
                if e.errno == errno.ENOENT:
                    # make the "graphviz not installed" case diagnosable
                    e.strerror = 'File not found: {0}'.format(self.renderer)
                raise
            os.unlink(dot_sourcepath)
        return outputfile

    def emit(self, line):
        """Adds <line> to final output."""
        self.lines.append(line)

    def emit_edge(self, name1, name2, **props):
        """emit an edge from <name1> to <name2>.
        edge properties: see http://www.graphviz.org/doc/info/attrs.html
        """
        attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
        n_from, n_to = normalize_node_id(name1), normalize_node_id(name2)
        self.emit('%s -> %s [%s];' % (n_from, n_to, ', '.join(sorted(attrs))))

    def emit_node(self, name, **props):
        """emit a node with given properties.
        node properties: see http://www.graphviz.org/doc/info/attrs.html
        """
        attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
        self.emit('%s [%s];' % (normalize_node_id(name), ', '.join(sorted(attrs))))
|
||||
|
||||
def normalize_node_id(nid):
    """Returns a suitable DOT node id for `nid` (the id wrapped in quotes)."""
    return '"{0}"'.format(nid)
|
||||
|
||||
class GraphGenerator:
    """Drive a rendering backend over a graph visitor."""

    def __init__(self, backend):
        # the backend turns emitted nodes/edges into a concrete output format
        self.backend = backend

    # XXX doesn't like space in outpufile / mapfile
    def generate(self, visitor, propshdlr, outputfile=None, mapfile=None):
        """Emit every node then every edge produced by `visitor`, getting
        node/edge properties from `propshdlr`, then render via the backend.
        """
        self.propshdlr = propshdlr
        emit_node = self.backend.emit_node
        emit_edge = self.backend.emit_edge
        for nodeid, node in visitor.nodes():
            emit_node(nodeid, **propshdlr.node_properties(node))
        for subjnode, objnode, edge in visitor.edges():
            emit_edge(subjnode, objnode,
                      **propshdlr.edge_properties(edge, subjnode, objnode))
        return self.backend.generate(outputfile=outputfile, mapfile=mapfile)
|
||||
|
||||
|
||||
class UnorderableGraph(Exception):
    """Raised by :func:`ordered_nodes` when the dependency graph cannot be
    totally ordered: it contains cycles or edges pointing to vertices that
    are not keys of the graph dict.
    """
    pass
|
||||
|
||||
def ordered_nodes(graph):
    """takes a dependency graph dict as arguments and return an ordered tuple of
    nodes starting with nodes without dependencies and up to the outermost node.

    If there is some cycle in the graph, :exc:`UnorderableGraph` will be raised.

    Also the given graph dict will be emptied (nodes are deleted from it as
    they are ordered).

    :param graph: dict mapping each node to the list of nodes it depends on
    :return: tuple of nodes, dependency-free nodes last (ties sorted)
    """
    # check graph consistency
    cycles = get_cycles(graph)
    if cycles:
        cycles = '\n'.join([' -> '.join(cycle) for cycle in cycles])
        raise UnorderableGraph('cycles in graph: %s' % cycles)
    vertices = set(graph)
    to_vertices = set()
    for edges in graph.values():
        to_vertices |= set(edges)
    # every edge target must itself be a key of the graph dict
    missing_vertices = to_vertices - vertices
    if missing_vertices:
        raise UnorderableGraph('missing vertices: %s' % ', '.join(missing_vertices))
    # order vertices: repeatedly peel off the layer of nodes whose
    # dependencies have all been ordered already
    order = []
    order_set = set()
    old_len = None
    while graph:
        # no progress on a consistent graph should be impossible; guard anyway
        if old_len == len(graph):
            raise UnorderableGraph('unknown problem with %s' % graph)
        old_len = len(graph)
        deps_ok = []
        for node, node_deps in graph.items():
            for dep in node_deps:
                if dep not in order_set:
                    break
            else:
                # for/else: all deps already ordered
                deps_ok.append(node)
        order.append(deps_ok)
        order_set |= set(deps_ok)
        for node in deps_ok:
            del graph[node]
    # layers were collected dependencies-first; reverse so the outermost
    # nodes come first, sorting within each layer for determinism
    result = []
    for grp in reversed(order):
        result.extend(sorted(grp))
    return tuple(result)
|
||||
|
||||
|
||||
def get_cycles(graph_dict, vertices=None):
    '''given a dictionary representing an ordered graph (i.e. key are vertices
    and values is a list of destination vertices representing edges), return a
    list of detected cycles
    '''
    if not graph_dict:
        return ()
    found = []
    # default to exploring from every vertex of the graph
    roots = graph_dict.keys() if vertices is None else vertices
    for root in roots:
        _get_cycles(graph_dict, [], set(), found, root)
    return found
|
||||
|
||||
def _get_cycles(graph_dict, path, visited, result, vertice):
    """recursive function doing the real work for get_cycles

    :param graph_dict: adjacency dict (vertex -> list of successors)
    :param path: current DFS path, mutated in place (appended/popped)
    :param visited: vertices whose subtree has already been explored
    :param result: accumulator list of cycles, mutated in place
    :param vertice: vertex currently being explored
    """
    if vertice in path:
        # back-edge found: rebuild the cycle from the tail of the path
        cycle = [vertice]
        for node in path[::-1]:
            if node == vertice:
                break
            cycle.insert(0, node)
        # make a canonical representation (rotate so the smallest
        # vertex comes first) to deduplicate rotations of the same cycle
        start_from = min(cycle)
        index = cycle.index(start_from)
        cycle = cycle[index:] + cycle[0:index]
        # append it to result if not already in
        if not cycle in result:
            result.append(cycle)
        return
    path.append(vertice)
    try:
        for node in graph_dict[vertice]:
            # don't check already visited nodes again
            if node not in visited:
                _get_cycles(graph_dict, path, visited, result, node)
                # marked visited only AFTER its subtree is fully explored
                visited.add(node)
    except KeyError:
        # vertex with no adjacency entry: treated as having no successors
        pass
    path.pop()
|
||||
|
||||
def has_path(graph_dict, fromnode, tonode, path=None):
    """generic function taking a simple graph definition as a dictionary, with
    node has key associated to a list of nodes directly reachable from it.

    Return None if no path exists to go from `fromnode` to `tonode`, else the
    first path found (as a list including the destination node at last)
    """
    if path is None:
        path = []
    elif fromnode in path:
        # already on the current path: avoid looping forever
        return None
    path.append(fromnode)
    for successor in graph_dict[fromnode]:
        # direct hit first, otherwise recurse through the successor
        found = successor == tonode
        if not found:
            found = has_path(graph_dict, successor, tonode, path) is not None
        if found:
            return path[1:] + [tonode]
    path.pop()
    return None
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue