"""Generate fine-grained dependencies for AST nodes, for use in the daemon mode.
|
||
|
|
||
|
Dependencies are stored in a map from *triggers* to *sets of affected locations*.
|
||
|
|
||
|
A trigger is a string that represents a program property that has changed, such
|
||
|
as the signature of a specific function. Triggers are written as '<...>' (angle
|
||
|
brackets). When a program property changes, we determine the relevant trigger(s)
|
||
|
and all affected locations. The latter are stale and will have to be reprocessed.
|
||
|
|
||
|
An affected location is a string than can refer to a *target* (a non-nested
|
||
|
function or method, or a module top level), a class, or a trigger (for
|
||
|
recursively triggering other triggers).
|
||
|
|
||
|
Here's an example representation of a simple dependency map (in format
|
||
|
"<trigger> -> locations"):
|
||
|
|
||
|
<m.A.g> -> m.f
|
||
|
<m.A> -> <m.f>, m.A, m.f
|
||
|
|
||
|
Assuming 'A' is a class, this means that
|
||
|
|
||
|
1) if a property of 'm.A.g', such as the signature, is changed, we need
|
||
|
to process target (function) 'm.f'
|
||
|
|
||
|
2) if the MRO or other significant property of class 'm.A' changes, we
|
||
|
need to process target 'm.f', the entire class 'm.A', and locations
|
||
|
triggered by trigger '<m.f>' (this explanation is a bit simplified;
|
||
|
see below for more details).
|
||
|
|
||
|
The triggers to fire are determined using mypy.server.astdiff.
|
||
|
|
||
|
Examples of triggers:
|
||
|
|
||
|
* '<mod.x>' represents a module attribute/function/class. If any externally
|
||
|
visible property of 'x' changes, this gets fired. For changes within
|
||
|
classes, only "big" changes cause the class to be triggered (such as a
|
||
|
change in MRO). Smaller changes, such as changes to some attributes, don't
|
||
|
trigger the entire class.
|
||
|
* '<mod.Cls.x>' represents the type and kind of attribute/method 'x' of
|
||
|
class 'mod.Cls'. This can also refer to an attribute inherited from a
|
||
|
base class (relevant if it's accessed through a value of type 'Cls'
|
||
|
instead of the base class type).
|
||
|
* '<package.mod>' represents the existence of module 'package.mod'. This
|
||
|
gets triggered if 'package.mod' is created or deleted, or if it gets
|
||
|
changed into something other than a module.
|
||
|
|
||
|
Examples of locations:
|
||
|
|
||
|
* 'mod' is the top level of module 'mod' (doesn't include any function bodies,
|
||
|
but includes class bodies not nested within a function).
|
||
|
* 'mod.f' is function 'f' in module 'mod' (module-level variables aren't separate
|
||
|
locations but are included in the module top level). Functions also include
|
||
|
any nested functions and classes -- such nested definitions aren't separate
|
||
|
locations, for simplicity of implementation.
|
||
|
* 'mod.Cls.f' is method 'f' of 'mod.Cls'. Non-method attributes aren't locations.
|
||
|
* 'mod.Cls' represents each method in class 'mod.Cls' + the top-level of the
|
||
|
module 'mod'. (To simplify the implementation, there is no location that only
|
||
|
includes the body of a class without the entire surrounding module top level.)
|
||
|
* Trigger '<...>' as a location is an indirect way of referring to to all
|
||
|
locations triggered by the trigger. These indirect locations keep the
|
||
|
dependency map smaller and easier to manage.
|
||
|
|
||
|
Triggers can be triggered by program changes such as these:
|
||
|
|
||
|
* Addition or deletion of an attribute (or module).
|
||
|
* Change of the kind of thing a name represents (such as a change from a function
|
||
|
to a class).
|
||
|
* Change of the static type of a name.
|
||
|
|
||
|
Changes in the body of a function that aren't reflected in the signature don't
|
||
|
cause the function to be triggered. More generally, we trigger only on changes
|
||
|
that may affect type checking results outside the module that contains the
|
||
|
change.
|
||
|
|
||
|
We don't generate dependencies from builtins and certain other stdlib modules,
|
||
|
since these change very rarely, and they would just increase the size of the
|
||
|
dependency map significantly without significant benefit.
|
||
|
|
||
|
Test cases for this module live in 'test-data/unit/deps*.test'.
|
||
|
"""
|
||
|
|
||
|
from typing import Dict, List, Set, Optional, Tuple
from typing_extensions import DefaultDict

from mypy.checkmember import bind_self
from mypy.nodes import (
    Node, Expression, MypyFile, FuncDef, ClassDef, AssignmentStmt, NameExpr, MemberExpr, Import,
    ImportFrom, CallExpr, CastExpr, TypeVarExpr, TypeApplication, IndexExpr, UnaryExpr, OpExpr,
    ComparisonExpr, GeneratorExpr, DictionaryComprehension, StarExpr, PrintStmt, ForStmt, WithStmt,
    TupleExpr, OperatorAssignmentStmt, DelStmt, YieldFromExpr, Decorator, Block,
    TypeInfo, FuncBase, OverloadedFuncDef, RefExpr, SuperExpr, Var, NamedTupleExpr, TypedDictExpr,
    LDEF, MDEF, GDEF, TypeAliasExpr, NewTypeExpr, ImportAll, EnumCallExpr, AwaitExpr,
    AssertTypeExpr,
)
from mypy.operators import (
    op_methods, reverse_op_methods, ops_with_inplace_method, unary_op_methods
)
from mypy.traverser import TraverserVisitor
from mypy.types import (
    Type, Instance, AnyType, NoneType, TypeVisitor, CallableType, DeletedType, PartialType,
    TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType,
    FunctionLike, Overloaded, TypeOfAny, LiteralType, ErasedType, get_proper_type, ProperType,
    TypeAliasType, ParamSpecType, Parameters, UnpackType, TypeVarTupleType,
)
from mypy.server.trigger import make_trigger, make_wildcard_trigger
from mypy.util import correct_relative_import
from mypy.scope import Scope
from mypy.typestate import TypeState
from mypy.options import Options


def get_dependencies(target: MypyFile,
                     type_map: Dict[Expression, Type],
                     python_version: Tuple[int, int],
                     options: Options) -> Dict[str, Set[str]]:
    """Get all dependencies of a node, recursively."""
    visitor = DependencyVisitor(type_map, python_version, target.alias_deps, options)
    target.accept(visitor)
    return visitor.map
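

# Usage sketch (illustrative only; not called from within this module). It assumes a
# completed, type-checked build; the attribute names `result.files` and `result.types`
# on mypy.build.BuildResult are assumptions made for this sketch:
#
#     result = mypy.build.build(...)
#     tree = result.files['pkg.mod']
#     deps = get_dependencies(tree, result.types, options.python_version, options)
#     # deps maps triggers to affected locations in the format described in the module
#     # docstring, e.g. {'<pkg.mod.A>': {'pkg.mod.f', ...}, ...} if f's signature uses A.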


def get_dependencies_of_target(module_id: str,
                               module_tree: MypyFile,
                               target: Node,
                               type_map: Dict[Expression, Type],
                               python_version: Tuple[int, int]) -> Dict[str, Set[str]]:
    """Get dependencies of a target -- don't recurse into nested targets."""
    # TODO: Add tests for this function.
    visitor = DependencyVisitor(type_map, python_version, module_tree.alias_deps)
    with visitor.scope.module_scope(module_id):
        if isinstance(target, MypyFile):
            # Only get dependencies of the top-level of the module. Don't recurse into
            # functions.
            for defn in target.defs:
                # TODO: Recurse into top-level statements and class bodies but skip functions.
                if not isinstance(defn, (ClassDef, Decorator, FuncDef, OverloadedFuncDef)):
                    defn.accept(visitor)
        elif isinstance(target, FuncBase) and target.info:
            # It's a method.
            # TODO: Methods in nested classes.
            with visitor.scope.class_scope(target.info):
                target.accept(visitor)
        else:
            target.accept(visitor)
    return visitor.map


class DependencyVisitor(TraverserVisitor):
    def __init__(self,
                 type_map: Dict[Expression, Type],
                 python_version: Tuple[int, int],
                 alias_deps: 'DefaultDict[str, Set[str]]',
                 options: Optional[Options] = None) -> None:
        self.scope = Scope()
        self.type_map = type_map
        self.python2 = python_version[0] == 2
        # This attribute holds a mapping from target to names of type aliases
        # it depends on. These need to be processed specially, since they are
        # only present in expanded form in symbol tables. For example, after:
        #     A = List[int]
        #     x: A
        # The module symbol table will just have a Var `x` with type `List[int]`,
        # and the dependency of `x` on `A` is lost. Therefore the alias dependencies
        # are preserved at alias expansion points in `semanal.py`, stored as an attribute
        # on MypyFile, and then passed here.
        self.alias_deps = alias_deps
        self.map: Dict[str, Set[str]] = {}
        self.is_class = False
        self.is_package_init_file = False
        self.options = options

    def visit_mypy_file(self, o: MypyFile) -> None:
        with self.scope.module_scope(o.fullname):
            self.is_package_init_file = o.is_package_init_file()
            self.add_type_alias_deps(self.scope.current_target())
            for trigger, targets in o.plugin_deps.items():
                self.map.setdefault(trigger, set()).update(targets)
            super().visit_mypy_file(o)

    def visit_func_def(self, o: FuncDef) -> None:
        with self.scope.function_scope(o):
            target = self.scope.current_target()
            if o.type:
                if self.is_class and isinstance(o.type, FunctionLike):
                    signature: Type = bind_self(o.type)
                else:
                    signature = o.type
                for trigger in self.get_type_triggers(signature):
                    self.add_dependency(trigger)
                    self.add_dependency(trigger, target=make_trigger(target))
            if o.info:
                for base in non_trivial_bases(o.info):
                    # Base class __init__/__new__ doesn't generate a logical
                    # dependency since the override can be incompatible.
                    if not self.use_logical_deps() or o.name not in ('__init__', '__new__'):
                        self.add_dependency(make_trigger(base.fullname + '.' + o.name))
            self.add_type_alias_deps(self.scope.current_target())
            super().visit_func_def(o)
            variants = set(o.expanded) - {o}
            for ex in variants:
                if isinstance(ex, FuncDef):
                    super().visit_func_def(ex)

    def visit_decorator(self, o: Decorator) -> None:
        if not self.use_logical_deps():
            # We don't need to recheck outer scope for an overload, only the overload itself.
            # Also if any decorator is nested, it is not externally visible, so we don't need to
            # generate a dependency.
            if not o.func.is_overload and self.scope.current_function_name() is None:
                self.add_dependency(make_trigger(o.func.fullname))
        else:
            # Add logical dependencies from decorators to the function. For example,
            # if we have
            #     @dec
            #     def func(): ...
            # then if `dec` is unannotated, it will "spoil" `func` and consequently
            # all call sites, making them all `Any`.
            for d in o.decorators:
                tname: Optional[str] = None
                if isinstance(d, RefExpr) and d.fullname is not None:
                    tname = d.fullname
                if (isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and
                        d.callee.fullname is not None):
                    tname = d.callee.fullname
                if tname is not None:
                    self.add_dependency(make_trigger(tname), make_trigger(o.func.fullname))
        super().visit_decorator(o)

    def visit_class_def(self, o: ClassDef) -> None:
        with self.scope.class_scope(o.info):
            target = self.scope.current_full_target()
            self.add_dependency(make_trigger(target), target)
            old_is_class = self.is_class
            self.is_class = True
            # Add dependencies to type variables of a generic class.
            for tv in o.type_vars:
                self.add_dependency(make_trigger(tv.fullname), target)
            self.process_type_info(o.info)
            super().visit_class_def(o)
            self.is_class = old_is_class

    def visit_newtype_expr(self, o: NewTypeExpr) -> None:
        if o.info:
            with self.scope.class_scope(o.info):
                self.process_type_info(o.info)

    def process_type_info(self, info: TypeInfo) -> None:
        target = self.scope.current_full_target()
        for base in info.bases:
            self.add_type_dependencies(base, target=target)
        if info.tuple_type:
            self.add_type_dependencies(info.tuple_type, target=make_trigger(target))
        if info.typeddict_type:
            self.add_type_dependencies(info.typeddict_type, target=make_trigger(target))
        if info.declared_metaclass:
            self.add_type_dependencies(info.declared_metaclass, target=make_trigger(target))
        if info.is_protocol:
            for base_info in info.mro[:-1]:
                # We add dependencies from the whole MRO to cover explicit subprotocols.
                # For example:
                #
                #     class Super(Protocol):
                #         x: int
                #     class Sub(Super, Protocol):
                #         y: int
                #
                # In this example we add <Super[wildcard]> -> <Sub>, to invalidate Sub if
                # a new member is added to Super.
                self.add_dependency(make_wildcard_trigger(base_info.fullname),
                                    target=make_trigger(target))
        # More protocol dependencies are collected in TypeState._snapshot_protocol_deps
        # after a full run or update is finished.

        self.add_type_alias_deps(self.scope.current_target())
        for name, node in info.names.items():
            if isinstance(node.node, Var):
                # Recheck Liskov if needed; self definitions are checked in the defining method.
                if node.node.is_initialized_in_class and has_user_bases(info):
                    self.add_dependency(make_trigger(info.fullname + '.' + name))
                for base_info in non_trivial_bases(info):
                    # If the type of an attribute changes in a base class, we make references
                    # to the attribute in the subclass stale.
                    self.add_dependency(make_trigger(base_info.fullname + '.' + name),
                                        target=make_trigger(info.fullname + '.' + name))
        for base_info in non_trivial_bases(info):
            for name, node in base_info.names.items():
                if self.use_logical_deps():
                    # Skip the logical dependency if an attribute is not overridden. For example,
                    # in case of:
                    #     class Base:
                    #         x = 1
                    #         y = 2
                    #     class Sub(Base):
                    #         x = 3
                    # we skip <Base.y> -> <Child.y>, because even if `y` is unannotated it
                    # doesn't affect precision of Liskov checking.
                    if name not in info.names:
                        continue
                    # __init__ and __new__ can be overridden with different signatures, so no
                    # logical dependency.
                    if name in ('__init__', '__new__'):
                        continue
                self.add_dependency(make_trigger(base_info.fullname + '.' + name),
                                    target=make_trigger(info.fullname + '.' + name))
            if not self.use_logical_deps():
                # These dependencies are only useful for propagating changes --
                # they aren't logical dependencies since __init__ and __new__ can be
                # overridden with a different signature.
                self.add_dependency(make_trigger(base_info.fullname + '.__init__'),
                                    target=make_trigger(info.fullname + '.__init__'))
                self.add_dependency(make_trigger(base_info.fullname + '.__new__'),
                                    target=make_trigger(info.fullname + '.__new__'))
                # If the set of abstract attributes changes, this may invalidate class
                # instantiation, or change the generated error message, since Python checks
                # class abstract status when creating an instance.
                self.add_dependency(make_trigger(base_info.fullname + '.(abstract)'),
                                    target=make_trigger(info.fullname + '.__init__'))
                # If the base class abstract attributes change, subclass abstract
                # attributes need to be recalculated.
                self.add_dependency(make_trigger(base_info.fullname + '.(abstract)'))

    def visit_import(self, o: Import) -> None:
        for id, as_id in o.ids:
            self.add_dependency(make_trigger(id), self.scope.current_target())

    def visit_import_from(self, o: ImportFrom) -> None:
        if self.use_logical_deps():
            # Just importing a name doesn't create a logical dependency.
            return
        module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                               o.relative,
                                               o.id,
                                               self.is_package_init_file)
        self.add_dependency(make_trigger(module_id))  # needed if module is added/removed
        for name, as_name in o.names:
            self.add_dependency(make_trigger(module_id + '.' + name))

    def visit_import_all(self, o: ImportAll) -> None:
        module_id, _ = correct_relative_import(self.scope.current_module_id(),
                                               o.relative,
                                               o.id,
                                               self.is_package_init_file)
        # The current target needs to be rechecked if anything "significant" changes in the
        # target module namespace (as the imported definitions will need to be updated).
        self.add_dependency(make_wildcard_trigger(module_id))

    def visit_block(self, o: Block) -> None:
        if not o.is_unreachable:
            super().visit_block(o)

    def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
        rvalue = o.rvalue
        if isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypeVarExpr):
            analyzed = rvalue.analyzed
            self.add_type_dependencies(analyzed.upper_bound,
                                       target=make_trigger(analyzed.fullname))
            for val in analyzed.values:
                self.add_type_dependencies(val, target=make_trigger(analyzed.fullname))
            # We need to re-analyze the definition if the bound or a value is deleted.
            super().visit_call_expr(rvalue)
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, NamedTupleExpr):
            # Depend on types of named tuple items.
            info = rvalue.analyzed.info
            prefix = f'{self.scope.current_full_target()}.{info.name}'
            for name, symnode in info.names.items():
                if not name.startswith('_') and isinstance(symnode.node, Var):
                    typ = symnode.node.type
                    if typ:
                        self.add_type_dependencies(typ)
                        self.add_type_dependencies(typ, target=make_trigger(prefix))
                        attr_target = make_trigger(f'{prefix}.{name}')
                        self.add_type_dependencies(typ, target=attr_target)
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, TypedDictExpr):
            # Depend on the underlying typeddict type
            info = rvalue.analyzed.info
            assert info.typeddict_type is not None
            prefix = f'{self.scope.current_full_target()}.{info.name}'
            self.add_type_dependencies(info.typeddict_type, target=make_trigger(prefix))
        elif isinstance(rvalue, CallExpr) and isinstance(rvalue.analyzed, EnumCallExpr):
            # Enum values are currently not checked, but we add the deps on them for the future.
            for name, symnode in rvalue.analyzed.info.names.items():
                if isinstance(symnode.node, Var) and symnode.node.type:
                    self.add_type_dependencies(symnode.node.type)
        elif o.is_alias_def:
            assert len(o.lvalues) == 1
            lvalue = o.lvalues[0]
            assert isinstance(lvalue, NameExpr)
            typ = get_proper_type(self.type_map.get(lvalue))
            if isinstance(typ, FunctionLike) and typ.is_type_obj():
                class_name = typ.type_object().fullname
                self.add_dependency(make_trigger(class_name + '.__init__'))
                self.add_dependency(make_trigger(class_name + '.__new__'))
            if isinstance(rvalue, IndexExpr) and isinstance(rvalue.analyzed, TypeAliasExpr):
                self.add_type_dependencies(rvalue.analyzed.type)
            elif typ:
                self.add_type_dependencies(typ)
        else:
            # Normal assignment
            super().visit_assignment_stmt(o)
            for lvalue in o.lvalues:
                self.process_lvalue(lvalue)
            items = o.lvalues + [rvalue]
            for i in range(len(items) - 1):
                lvalue = items[i]
                rvalue = items[i + 1]
                if isinstance(lvalue, TupleExpr):
                    self.add_attribute_dependency_for_expr(rvalue, '__iter__')
            if o.type:
                self.add_type_dependencies(o.type)
        if self.use_logical_deps() and o.unanalyzed_type is None:
            # Special case: for definitions without an explicit type like this:
            #     x = func(...)
            # we add a logical dependency <func> -> <x>, because if `func` is not annotated,
            # then it will make all points of use of `x` unchecked.
            if (isinstance(rvalue, CallExpr) and isinstance(rvalue.callee, RefExpr)
                    and rvalue.callee.fullname is not None):
                fname: Optional[str] = None
                if isinstance(rvalue.callee.node, TypeInfo):
                    # use actual __init__ as a dependency source
                    init = rvalue.callee.node.get('__init__')
                    if init and isinstance(init.node, FuncBase):
                        fname = init.node.fullname
                else:
                    fname = rvalue.callee.fullname
                if fname is None:
                    return
                for lv in o.lvalues:
                    if isinstance(lv, RefExpr) and lv.fullname and lv.is_new_def:
                        if lv.kind == LDEF:
                            return  # local definitions don't generate logical deps
                        self.add_dependency(make_trigger(fname), make_trigger(lv.fullname))

    def process_lvalue(self, lvalue: Expression) -> None:
        """Generate additional dependencies for an lvalue."""
        if isinstance(lvalue, IndexExpr):
            self.add_operator_method_dependency(lvalue.base, '__setitem__')
        elif isinstance(lvalue, NameExpr):
            if lvalue.kind in (MDEF, GDEF):
                # Assignment to an attribute in the class body, or direct assignment to a
                # global variable.
                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
                type_triggers = self.get_type_triggers(lvalue_type)
                attr_trigger = make_trigger('{}.{}'.format(self.scope.current_full_target(),
                                                           lvalue.name))
                for type_trigger in type_triggers:
                    self.add_dependency(type_trigger, attr_trigger)
        elif isinstance(lvalue, MemberExpr):
            if self.is_self_member_ref(lvalue) and lvalue.is_new_def:
                node = lvalue.node
                if isinstance(node, Var):
                    info = node.info
                    if info and has_user_bases(info):
                        # Recheck Liskov for self definitions
                        self.add_dependency(make_trigger(info.fullname + '.' + lvalue.name))
            if lvalue.kind is None:
                # Reference to a non-module attribute
                if lvalue.expr not in self.type_map:
                    # Unreachable assignment -> not checked so no dependencies to generate.
                    return
                object_type = self.type_map[lvalue.expr]
                lvalue_type = self.get_non_partial_lvalue_type(lvalue)
                type_triggers = self.get_type_triggers(lvalue_type)
                for attr_trigger in self.attribute_triggers(object_type, lvalue.name):
                    for type_trigger in type_triggers:
                        self.add_dependency(type_trigger, attr_trigger)
        elif isinstance(lvalue, TupleExpr):
            for item in lvalue.items:
                self.process_lvalue(item)
        elif isinstance(lvalue, StarExpr):
            self.process_lvalue(lvalue.expr)

    def is_self_member_ref(self, memberexpr: MemberExpr) -> bool:
        """Does memberexpr refer to an attribute of self?"""
        if not isinstance(memberexpr.expr, NameExpr):
            return False
        node = memberexpr.expr.node
        return isinstance(node, Var) and node.is_self

    def get_non_partial_lvalue_type(self, lvalue: RefExpr) -> Type:
        if lvalue not in self.type_map:
            # Likely a block considered unreachable during type checking.
            return UninhabitedType()
        lvalue_type = get_proper_type(self.type_map[lvalue])
        if isinstance(lvalue_type, PartialType):
            if isinstance(lvalue.node, Var):
                if lvalue.node.type:
                    lvalue_type = get_proper_type(lvalue.node.type)
                else:
                    lvalue_type = UninhabitedType()
            else:
                # Probably a secondary, non-definition assignment that doesn't
                # result in a non-partial type. We won't be able to infer any
                # dependencies from this so just return something. (The first,
                # definition assignment with a partial type is handled
                # differently, in the semantic analyzer.)
                assert not lvalue.is_new_def
                return UninhabitedType()
        return lvalue_type

    def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None:
        super().visit_operator_assignment_stmt(o)
        self.process_lvalue(o.lvalue)
        method = op_methods[o.op]
        self.add_attribute_dependency_for_expr(o.lvalue, method)
        if o.op in ops_with_inplace_method:
            inplace_method = '__i' + method[2:]
            self.add_attribute_dependency_for_expr(o.lvalue, inplace_method)

    def visit_for_stmt(self, o: ForStmt) -> None:
        super().visit_for_stmt(o)
        if not o.is_async:
            # __getitem__ is only used if __iter__ is missing but for simplicity we
            # just always depend on both.
            self.add_attribute_dependency_for_expr(o.expr, '__iter__')
            self.add_attribute_dependency_for_expr(o.expr, '__getitem__')
            if o.inferred_iterator_type:
                if self.python2:
                    method = 'next'
                else:
                    method = '__next__'
                self.add_attribute_dependency(o.inferred_iterator_type, method)
        else:
            self.add_attribute_dependency_for_expr(o.expr, '__aiter__')
            if o.inferred_iterator_type:
                self.add_attribute_dependency(o.inferred_iterator_type, '__anext__')

        self.process_lvalue(o.index)
        if isinstance(o.index, TupleExpr):
            # Process multiple assignment to index variables.
            item_type = o.inferred_item_type
            if item_type:
                # This is similar to above.
                self.add_attribute_dependency(item_type, '__iter__')
                self.add_attribute_dependency(item_type, '__getitem__')
        if o.index_type:
            self.add_type_dependencies(o.index_type)

    def visit_with_stmt(self, o: WithStmt) -> None:
        super().visit_with_stmt(o)
        for e in o.expr:
            if not o.is_async:
                self.add_attribute_dependency_for_expr(e, '__enter__')
                self.add_attribute_dependency_for_expr(e, '__exit__')
            else:
                self.add_attribute_dependency_for_expr(e, '__aenter__')
                self.add_attribute_dependency_for_expr(e, '__aexit__')
        for typ in o.analyzed_types:
            self.add_type_dependencies(typ)

    def visit_print_stmt(self, o: PrintStmt) -> None:
        super().visit_print_stmt(o)
        if o.target:
            self.add_attribute_dependency_for_expr(o.target, 'write')

    def visit_del_stmt(self, o: DelStmt) -> None:
        super().visit_del_stmt(o)
        if isinstance(o.expr, IndexExpr):
            self.add_attribute_dependency_for_expr(o.expr.base, '__delitem__')

    # Expressions

    def process_global_ref_expr(self, o: RefExpr) -> None:
        if o.fullname is not None:
            self.add_dependency(make_trigger(o.fullname))

        # If this is a reference to a type, generate a dependency to its
        # constructor.
        # IDEA: Avoid generating spurious dependencies for except statements,
        #       class attribute references, etc., if performance is a problem.
        typ = get_proper_type(self.type_map.get(o))
        if isinstance(typ, FunctionLike) and typ.is_type_obj():
            class_name = typ.type_object().fullname
            self.add_dependency(make_trigger(class_name + '.__init__'))
            self.add_dependency(make_trigger(class_name + '.__new__'))

    def visit_name_expr(self, o: NameExpr) -> None:
        if o.kind == LDEF:
            # We don't track dependencies to local variables, since they
            # aren't externally visible.
            return
        if o.kind == MDEF:
            # Direct reference to a member is only possible in the scope that
            # defined the name, so no dependency is required.
            return
        self.process_global_ref_expr(o)

    def visit_member_expr(self, e: MemberExpr) -> None:
        if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, TypeInfo):
            # Special case class attribute so that we don't depend on "__init__".
            self.add_dependency(make_trigger(e.expr.node.fullname))
        else:
            super().visit_member_expr(e)
        if e.kind is not None:
            # Reference to a module attribute
            self.process_global_ref_expr(e)
        else:
            # Reference to a non-module (or missing) attribute
            if e.expr not in self.type_map:
                # No type available -- this happens for unreachable code. Since it's unreachable,
                # it wasn't type checked and we don't need to generate dependencies.
                return
            if isinstance(e.expr, RefExpr) and isinstance(e.expr.node, MypyFile):
                # Special case: reference to a missing module attribute.
                self.add_dependency(make_trigger(e.expr.node.fullname + '.' + e.name))
                return
            typ = get_proper_type(self.type_map[e.expr])
            self.add_attribute_dependency(typ, e.name)
            if self.use_logical_deps() and isinstance(typ, AnyType):
                name = self.get_unimported_fullname(e, typ)
                if name is not None:
                    # Generate a logical dependency from an unimported
                    # definition (which comes from a missing module).
                    # Example:
                    #     import missing  # "missing" not in build
                    #
                    #     def g() -> None:
                    #         missing.f()  # Generate dependency from "missing.f"
                    self.add_dependency(make_trigger(name))

    def get_unimported_fullname(self, e: MemberExpr, typ: AnyType) -> Optional[str]:
        """If e refers to an unimported definition, infer the fullname of this.

        Return None if e doesn't refer to an unimported definition or if we can't
        determine the name.
        """
        suffix = ''
        # Unwrap nested member expressions to handle cases like "a.b.c.d" where
        # "a.b" is a known reference to an unimported module. Find the base
        # reference to an unimported module (such as "a.b") and the name suffix
        # (such as "c.d") needed to build a full name.
        while typ.type_of_any == TypeOfAny.from_another_any and isinstance(e.expr, MemberExpr):
            suffix = '.' + e.name + suffix
            e = e.expr
            if e.expr not in self.type_map:
                return None
            obj_type = get_proper_type(self.type_map[e.expr])
            if not isinstance(obj_type, AnyType):
                # Can't find the base reference to the unimported module.
                return None
            typ = obj_type
        if typ.type_of_any == TypeOfAny.from_unimported_type and typ.missing_import_name:
            # Infer the full name of the unimported definition.
            return typ.missing_import_name + '.' + e.name + suffix
        return None

    def visit_super_expr(self, e: SuperExpr) -> None:
        # Arguments in "super(C, self)" won't generate useful logical deps.
        if not self.use_logical_deps():
            super().visit_super_expr(e)
        if e.info is not None:
            name = e.name
            for base in non_trivial_bases(e.info):
                self.add_dependency(make_trigger(base.fullname + '.' + name))
                if name in base.names:
                    # No need to depend on further base classes, since we found
                    # the target. This is safe since if the target gets
                    # deleted or modified, we'll trigger it.
                    break

    def visit_call_expr(self, e: CallExpr) -> None:
        if isinstance(e.callee, RefExpr) and e.callee.fullname == 'builtins.isinstance':
            self.process_isinstance_call(e)
        else:
            super().visit_call_expr(e)
            typ = self.type_map.get(e.callee)
            if typ is not None:
                typ = get_proper_type(typ)
                if not isinstance(typ, FunctionLike):
                    self.add_attribute_dependency(typ, '__call__')

    def process_isinstance_call(self, e: CallExpr) -> None:
        """Process "isinstance(...)" in a way that avoids some extra dependencies."""
        if len(e.args) == 2:
            arg = e.args[1]
            if (isinstance(arg, RefExpr)
                    and arg.kind == GDEF
                    and isinstance(arg.node, TypeInfo)
                    and arg.fullname):
                # Special case to avoid redundant dependencies from "__init__".
                self.add_dependency(make_trigger(arg.fullname))
                return
        # In uncommon cases generate normal dependencies. These will include
        # spurious dependencies, but the performance impact is small.
        super().visit_call_expr(e)

    def visit_cast_expr(self, e: CastExpr) -> None:
        super().visit_cast_expr(e)
        self.add_type_dependencies(e.type)

    def visit_assert_type_expr(self, e: AssertTypeExpr) -> None:
        super().visit_assert_type_expr(e)
        self.add_type_dependencies(e.type)

    def visit_type_application(self, e: TypeApplication) -> None:
        super().visit_type_application(e)
        for typ in e.types:
            self.add_type_dependencies(typ)

    def visit_index_expr(self, e: IndexExpr) -> None:
        super().visit_index_expr(e)
        self.add_operator_method_dependency(e.base, '__getitem__')

    def visit_unary_expr(self, e: UnaryExpr) -> None:
        super().visit_unary_expr(e)
        if e.op not in unary_op_methods:
            return
        method = unary_op_methods[e.op]
        self.add_operator_method_dependency(e.expr, method)

    def visit_op_expr(self, e: OpExpr) -> None:
        super().visit_op_expr(e)
        self.process_binary_op(e.op, e.left, e.right)

    def visit_comparison_expr(self, e: ComparisonExpr) -> None:
        super().visit_comparison_expr(e)
        for i, op in enumerate(e.operators):
            left = e.operands[i]
            right = e.operands[i + 1]
            self.process_binary_op(op, left, right)
            if self.python2 and op in ('==', '!=', '<', '<=', '>', '>='):
                self.add_operator_method_dependency(left, '__cmp__')
                self.add_operator_method_dependency(right, '__cmp__')

    def process_binary_op(self, op: str, left: Expression, right: Expression) -> None:
        method = op_methods.get(op)
        if method:
            if op == 'in':
                self.add_operator_method_dependency(right, method)
            else:
                self.add_operator_method_dependency(left, method)
                rev_method = reverse_op_methods.get(method)
                if rev_method:
                    self.add_operator_method_dependency(right, rev_method)

    def add_operator_method_dependency(self, e: Expression, method: str) -> None:
        typ = get_proper_type(self.type_map.get(e))
        if typ is not None:
            self.add_operator_method_dependency_for_type(typ, method)

    def add_operator_method_dependency_for_type(self, typ: ProperType, method: str) -> None:
        # Note that operator methods can't be (non-metaclass) methods of type objects
        # (that is, TypeType objects or Callables representing a type).
        if isinstance(typ, TypeVarType):
            typ = get_proper_type(typ.upper_bound)
        if isinstance(typ, TupleType):
            typ = typ.partial_fallback
        if isinstance(typ, Instance):
            trigger = make_trigger(typ.type.fullname + '.' + method)
            self.add_dependency(trigger)
        elif isinstance(typ, UnionType):
            for item in typ.items:
                self.add_operator_method_dependency_for_type(get_proper_type(item), method)
        elif isinstance(typ, FunctionLike) and typ.is_type_obj():
            self.add_operator_method_dependency_for_type(typ.fallback, method)
        elif isinstance(typ, TypeType):
            if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
                self.add_operator_method_dependency_for_type(typ.item.type.metaclass_type, method)

    def visit_generator_expr(self, e: GeneratorExpr) -> None:
        super().visit_generator_expr(e)
        for seq in e.sequences:
            self.add_iter_dependency(seq)

    def visit_dictionary_comprehension(self, e: DictionaryComprehension) -> None:
        super().visit_dictionary_comprehension(e)
        for seq in e.sequences:
            self.add_iter_dependency(seq)

    def visit_star_expr(self, e: StarExpr) -> None:
        super().visit_star_expr(e)
        self.add_iter_dependency(e.expr)

    def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
        super().visit_yield_from_expr(e)
        self.add_iter_dependency(e.expr)

    def visit_await_expr(self, e: AwaitExpr) -> None:
        super().visit_await_expr(e)
        self.add_attribute_dependency_for_expr(e.expr, '__await__')

    # Helpers

    def add_type_alias_deps(self, target: str) -> None:
        # Type aliases are special, because some of the dependencies are calculated
        # in semanal.py, before they are expanded.
        if target in self.alias_deps:
            for alias in self.alias_deps[target]:
                self.add_dependency(make_trigger(alias))

    def add_dependency(self, trigger: str, target: Optional[str] = None) -> None:
        """Add dependency from trigger to a target.

        If the target is not given explicitly, use the current target.
        """
        if trigger.startswith(('<builtins.', '<typing.',
                               '<mypy_extensions.', '<typing_extensions.')):
            # Don't track dependencies to certain library modules to keep the size of
            # the dependencies manageable. These dependencies should only
            # change on mypy version updates, which will require a full rebuild
            # anyway.
            return
        if target is None:
            target = self.scope.current_target()
        self.map.setdefault(trigger, set()).add(target)

    def add_type_dependencies(self, typ: Type, target: Optional[str] = None) -> None:
        """Add dependencies to all components of a type.

        Args:
            target: If not None, override the default (current) target of the
                generated dependency.
        """
        for trigger in self.get_type_triggers(typ):
            self.add_dependency(trigger, target)

    def add_attribute_dependency(self, typ: Type, name: str) -> None:
        """Add dependencies for accessing a named attribute of a type."""
        targets = self.attribute_triggers(typ, name)
        for target in targets:
            self.add_dependency(target)

    def attribute_triggers(self, typ: Type, name: str) -> List[str]:
        """Return all triggers associated with the attribute of a type."""
        typ = get_proper_type(typ)
        if isinstance(typ, TypeVarType):
            typ = get_proper_type(typ.upper_bound)
        if isinstance(typ, TupleType):
            typ = typ.partial_fallback
        if isinstance(typ, Instance):
            member = f'{typ.type.fullname}.{name}'
            return [make_trigger(member)]
        elif isinstance(typ, FunctionLike) and typ.is_type_obj():
            member = f'{typ.type_object().fullname}.{name}'
            triggers = [make_trigger(member)]
            triggers.extend(self.attribute_triggers(typ.fallback, name))
            return triggers
        elif isinstance(typ, UnionType):
            targets = []
            for item in typ.items:
                targets.extend(self.attribute_triggers(item, name))
            return targets
        elif isinstance(typ, TypeType):
            triggers = self.attribute_triggers(typ.item, name)
            if isinstance(typ.item, Instance) and typ.item.type.metaclass_type is not None:
                triggers.append(make_trigger('%s.%s' %
                                             (typ.item.type.metaclass_type.type.fullname,
                                              name)))
            return triggers
        else:
            return []
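
    # For example (illustrative sketch, derived from the cases above): for an Instance
    # of class 'm.A', attribute_triggers(typ, 'x') returns ['<m.A.x>']; for a union such
    # as Union[m.A, m.B] it returns the triggers of each item, i.e. ['<m.A.x>', '<m.B.x>'].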

    def add_attribute_dependency_for_expr(self, e: Expression, name: str) -> None:
        typ = self.type_map.get(e)
        if typ is not None:
            self.add_attribute_dependency(typ, name)

    def add_iter_dependency(self, node: Expression) -> None:
        typ = self.type_map.get(node)
        if typ:
            self.add_attribute_dependency(typ, '__iter__')

    def use_logical_deps(self) -> bool:
        return self.options is not None and self.options.logical_deps

    def get_type_triggers(self, typ: Type) -> List[str]:
        return get_type_triggers(typ, self.use_logical_deps())


def get_type_triggers(typ: Type, use_logical_deps: bool) -> List[str]:
    """Return all triggers that correspond to a type becoming stale."""
    return typ.accept(TypeTriggersVisitor(use_logical_deps))
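
# For example (illustrative sketch): for a plain instance type such as 'm.A' this
# returns ['<m.A>']; for a generic instance like 'm.A[m.B]' it also includes the
# triggers of the type arguments, i.e. ['<m.A>', '<m.B>'].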


class TypeTriggersVisitor(TypeVisitor[List[str]]):
    def __init__(self, use_logical_deps: bool) -> None:
        self.deps: List[str] = []
        self.use_logical_deps = use_logical_deps

    def get_type_triggers(self, typ: Type) -> List[str]:
        return get_type_triggers(typ, self.use_logical_deps)

    def visit_instance(self, typ: Instance) -> List[str]:
        trigger = make_trigger(typ.type.fullname)
        triggers = [trigger]
        for arg in typ.args:
            triggers.extend(self.get_type_triggers(arg))
        if typ.last_known_value:
            triggers.extend(self.get_type_triggers(typ.last_known_value))
        return triggers

    def visit_type_alias_type(self, typ: TypeAliasType) -> List[str]:
        assert typ.alias is not None
        trigger = make_trigger(typ.alias.fullname)
        triggers = [trigger]
        for arg in typ.args:
            triggers.extend(self.get_type_triggers(arg))
        # TODO: Add a guard for infinite recursion here. Moreover, now that type aliases
        # are their own kind of type, we can simplify the logic to rely on intermediate
        # dependencies (like for instance types).
        triggers.extend(self.get_type_triggers(typ.alias.target))
        return triggers

    def visit_any(self, typ: AnyType) -> List[str]:
        if typ.missing_import_name is not None:
            return [make_trigger(typ.missing_import_name)]
        return []

    def visit_none_type(self, typ: NoneType) -> List[str]:
        return []

    def visit_callable_type(self, typ: CallableType) -> List[str]:
        triggers = []
        for arg in typ.arg_types:
            triggers.extend(self.get_type_triggers(arg))
        triggers.extend(self.get_type_triggers(typ.ret_type))
        # fallback is a metaclass type for class objects, and is
        # processed separately.
        return triggers

    def visit_overloaded(self, typ: Overloaded) -> List[str]:
        triggers = []
        for item in typ.items:
            triggers.extend(self.get_type_triggers(item))
        return triggers

    def visit_erased_type(self, t: ErasedType) -> List[str]:
        # This type should exist only temporarily during type inference
        assert False, "Should not see an erased type here"

    def visit_deleted_type(self, typ: DeletedType) -> List[str]:
        return []

    def visit_partial_type(self, typ: PartialType) -> List[str]:
        assert False, "Should not see a partial type here"

    def visit_tuple_type(self, typ: TupleType) -> List[str]:
        triggers = []
        for item in typ.items:
            triggers.extend(self.get_type_triggers(item))
        triggers.extend(self.get_type_triggers(typ.partial_fallback))
        return triggers

    def visit_type_type(self, typ: TypeType) -> List[str]:
        triggers = self.get_type_triggers(typ.item)
        if not self.use_logical_deps:
            old_triggers = triggers[:]
            for trigger in old_triggers:
                triggers.append(trigger.rstrip('>') + '.__init__>')
                triggers.append(trigger.rstrip('>') + '.__new__>')
        return triggers

    def visit_type_var(self, typ: TypeVarType) -> List[str]:
        triggers = []
        if typ.fullname:
            triggers.append(make_trigger(typ.fullname))
        if typ.upper_bound:
            triggers.extend(self.get_type_triggers(typ.upper_bound))
        for val in typ.values:
            triggers.extend(self.get_type_triggers(val))
        return triggers

    def visit_param_spec(self, typ: ParamSpecType) -> List[str]:
        triggers = []
        if typ.fullname:
            triggers.append(make_trigger(typ.fullname))
        triggers.extend(self.get_type_triggers(typ.upper_bound))
        return triggers

    def visit_type_var_tuple(self, typ: TypeVarTupleType) -> List[str]:
        triggers = []
        if typ.fullname:
            triggers.append(make_trigger(typ.fullname))
        triggers.extend(self.get_type_triggers(typ.upper_bound))
        return triggers

    def visit_unpack_type(self, typ: UnpackType) -> List[str]:
        return typ.type.accept(self)

    def visit_parameters(self, typ: Parameters) -> List[str]:
        triggers = []
        for arg in typ.arg_types:
            triggers.extend(self.get_type_triggers(arg))
        return triggers

    def visit_typeddict_type(self, typ: TypedDictType) -> List[str]:
        triggers = []
        for item in typ.items.values():
            triggers.extend(self.get_type_triggers(item))
        triggers.extend(self.get_type_triggers(typ.fallback))
        return triggers

    def visit_literal_type(self, typ: LiteralType) -> List[str]:
        return self.get_type_triggers(typ.fallback)

    def visit_unbound_type(self, typ: UnboundType) -> List[str]:
        return []

    def visit_uninhabited_type(self, typ: UninhabitedType) -> List[str]:
        return []

    def visit_union_type(self, typ: UnionType) -> List[str]:
        triggers = []
        for item in typ.items:
            triggers.extend(self.get_type_triggers(item))
        return triggers


def merge_dependencies(new_deps: Dict[str, Set[str]],
                       deps: Dict[str, Set[str]]) -> None:
    for trigger, targets in new_deps.items():
        deps.setdefault(trigger, set()).update(targets)
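
# Illustrative sketch: merge_dependencies mutates its second argument in place. For
# example, merging {'<m.A>': {'m.f'}} into a map that already contains
# {'<m.A>': {'m.g'}} leaves the second map as {'<m.A>': {'m.f', 'm.g'}}.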


def non_trivial_bases(info: TypeInfo) -> List[TypeInfo]:
    return [base for base in info.mro[1:]
            if base.fullname != 'builtins.object']


def has_user_bases(info: TypeInfo) -> bool:
    return any(base.module_name not in ('builtins', 'typing', 'enum') for base in info.mro[1:])


def dump_all_dependencies(modules: Dict[str, MypyFile],
                          type_map: Dict[Expression, Type],
                          python_version: Tuple[int, int],
                          options: Options) -> None:
    """Generate dependencies for all interesting modules and print them to stdout."""
    all_deps: Dict[str, Set[str]] = {}
    for id, node in modules.items():
        # Uncomment for debugging:
        # print('processing', id)
        if id in ('builtins', 'typing') or '/typeshed/' in node.path:
            continue
        assert id == node.fullname
        deps = get_dependencies(node, type_map, python_version, options)
        for trigger, targets in deps.items():
            all_deps.setdefault(trigger, set()).update(targets)
    TypeState.add_all_protocol_deps(all_deps)

    for trigger, targets in sorted(all_deps.items(), key=lambda x: x[0]):
        print(trigger)
        for target in sorted(targets):
            print(f'    {target}')
|