diff --git a/mypy/nodes.py b/mypy/nodes.py index c6c0c412be45..6375d500a8a3 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -1993,6 +1993,10 @@ class is generic then it will be a type constructor of higher kind. # needed during the semantic passes.) replaced = None # type: TypeInfo + # This is a dictionary that will be serialized and un-serialized as is. + # It is useful for plugins to add their data to save in the cache. + metadata = None # type: Dict[str, JsonDict] + FLAGS = [ 'is_abstract', 'is_enum', 'fallback_to_any', 'is_named_tuple', 'is_newtype', 'is_protocol', 'runtime_protocol' @@ -2016,6 +2020,7 @@ def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> No self._cache = set() self._cache_proper = set() self.add_type_vars() + self.metadata = {} def add_type_vars(self) -> None: if self.defn.type_vars: @@ -2218,6 +2223,7 @@ def serialize(self) -> JsonDict: 'typeddict_type': None if self.typeddict_type is None else self.typeddict_type.serialize(), 'flags': get_flags(self, TypeInfo.FLAGS), + 'metadata': self.metadata, } return data @@ -2244,6 +2250,7 @@ def deserialize(cls, data: JsonDict) -> 'TypeInfo': else mypy.types.TupleType.deserialize(data['tuple_type'])) ti.typeddict_type = (None if data['typeddict_type'] is None else mypy.types.TypedDictType.deserialize(data['typeddict_type'])) + ti.metadata = data['metadata'] set_flags(ti, data['flags']) return ti @@ -2612,3 +2619,10 @@ def check_arg_names(names: Sequence[Optional[str]], nodes: List[T], fail: Callab fail("Duplicate argument '{}' in {}".format(name, description), node) break seen_names.add(name) + + +def is_class_var(expr: NameExpr) -> bool: + """Return whether the expression is ClassVar[...]""" + if isinstance(expr.node, Var): + return expr.node.is_classvar + return False diff --git a/mypy/plugin.py b/mypy/plugin.py index 4ffa9395afc5..3e1e2bea3011 100644 --- a/mypy/plugin.py +++ b/mypy/plugin.py @@ -1,12 +1,17 @@ """Plugin system for extending mypy.""" -from collections import OrderedDict from abc import abstractmethod +from functools import partial from typing import Callable, List, Tuple, Optional, NamedTuple, TypeVar -from mypy.nodes import Expression, StrExpr, IntExpr, UnaryExpr, Context, DictExpr, ClassDef +import mypy.plugins.attrs +from mypy.nodes import ( + Expression, StrExpr, IntExpr, UnaryExpr, Context, DictExpr, ClassDef, + TypeInfo, SymbolTableNode +) +from mypy.tvar_scope import TypeVarScope from mypy.types import ( - Type, Instance, CallableType, TypedDictType, UnionType, NoneTyp, FunctionLike, TypeVarType, + Type, Instance, CallableType, TypedDictType, UnionType, NoneTyp, TypeVarType, AnyType, TypeList, UnboundType, TypeOfAny ) from mypy.messages import MessageBuilder @@ -56,6 +61,9 @@ def named_generic_type(self, name: str, args: List[Type]) -> Instance: class SemanticAnalyzerPluginInterface: """Interface for accessing semantic analyzer functionality in plugins.""" + options = None # type: Options + msg = None # type: MessageBuilder + @abstractmethod def named_type(self, qualified_name: str, args: Optional[List[Type]] = None) -> Instance: raise NotImplementedError @@ -69,6 +77,22 @@ def fail(self, msg: str, ctx: Context, serious: bool = False, *, blocker: bool = False) -> None: raise NotImplementedError + @abstractmethod + def anal_type(self, t: Type, *, + tvar_scope: Optional[TypeVarScope] = None, + allow_tuple_literal: bool = False, + aliasing: bool = False, + third_pass: bool = False) -> Type: + raise NotImplementedError + + @abstractmethod + def class_type(self, info: TypeInfo) 
-> Type: + raise NotImplementedError + + @abstractmethod + def lookup_fully_qualified(self, name: str) -> SymbolTableNode: + raise NotImplementedError + # A context for a function hook that infers the return type of a function with # a special signature. @@ -262,6 +286,17 @@ def get_method_hook(self, fullname: str return int_pow_callback return None + def get_class_decorator_hook(self, fullname: str + ) -> Optional[Callable[[ClassDefContext], None]]: + if fullname in mypy.plugins.attrs.attr_class_makers: + return mypy.plugins.attrs.attr_class_maker_callback + elif fullname in mypy.plugins.attrs.attr_dataclass_makers: + return partial( + mypy.plugins.attrs.attr_class_maker_callback, + auto_attribs_default=True + ) + return None + def open_callback(ctx: FunctionContext) -> Type: """Infer a better return type for 'open'. diff --git a/mypy/plugins/__init__.py b/mypy/plugins/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py new file mode 100644 index 000000000000..5ba4cd2c236b --- /dev/null +++ b/mypy/plugins/attrs.py @@ -0,0 +1,499 @@ +"""Plugin for supporting the attrs library (http://www.attrs.org)""" +from collections import OrderedDict +from typing import Optional, Dict, List, cast, Tuple, Iterable + +import mypy.plugin # To avoid circular imports. +from mypy.exprtotype import expr_to_unanalyzed_type, TypeTranslationError +from mypy.nodes import ( + Context, Argument, Var, ARG_OPT, ARG_POS, TypeInfo, AssignmentStmt, + TupleExpr, ListExpr, NameExpr, CallExpr, RefExpr, FuncBase, + is_class_var, TempNode, Decorator, MemberExpr, Expression, FuncDef, Block, + PassStmt, SymbolTableNode, MDEF, JsonDict +) +from mypy.types import ( + Type, AnyType, TypeOfAny, CallableType, NoneTyp, TypeVarDef, TypeVarType, + Overloaded, Instance +) +from mypy.typevars import fill_typevars + + +# The names of the different functions that create classes or arguments. +attr_class_makers = { + 'attr.s', + 'attr.attrs', + 'attr.attributes', +} +attr_dataclass_makers = { + 'attr.dataclass', +} +attr_attrib_makers = { + 'attr.ib', + 'attr.attrib', + 'attr.attr', +} + + +class Attribute: + """The value of an attr.ib() call.""" + + def __init__(self, name: str, info: TypeInfo, + has_default: bool, init: bool, converter_name: Optional[str], + context: Context) -> None: + self.name = name + self.info = info + self.has_default = has_default + self.init = init + self.converter_name = converter_name + self.context = context + + def argument(self, ctx: 'mypy.plugin.ClassDefContext') -> Argument: + """Return this attribute as an argument to __init__.""" + assert self.init + init_type = self.info[self.name].type + + if self.converter_name: + # When a converter is set the init_type is overriden by the first argument + # of the converter method. + converter = ctx.api.lookup_fully_qualified(self.converter_name) + if (converter + and converter.type + and isinstance(converter.type, CallableType) + and converter.type.arg_types): + init_type = converter.type.arg_types[0] + else: + init_type = None + + if init_type is None: + if ctx.api.options.disallow_untyped_defs: + # This is a compromise. If you don't have a type here then the + # __init__ will be untyped. But since the __init__ is added it's + # pointing at the decorator. So instead we also show the error in the + # assignment, which is where you would fix the issue. 
+ node = self.info[self.name].node + assert node is not None + ctx.api.msg.need_annotation_for_var(node, self.context) + + # Convert type not set to Any. + init_type = AnyType(TypeOfAny.unannotated) + + # Attrs removes leading underscores when creating the __init__ arguments. + return Argument(Var(self.name.lstrip("_"), init_type), init_type, + None, + ARG_OPT if self.has_default else ARG_POS) + + def serialize(self) -> JsonDict: + """Serialize this object so it can be saved and restored.""" + return { + 'name': self.name, + 'has_default': self.has_default, + 'init': self.init, + 'converter_name': self.converter_name, + 'context_line': self.context.line, + 'context_column': self.context.column, + } + + @classmethod + def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'Attribute': + """Return the Attribute that was serialized.""" + return Attribute( + data['name'], + info, + data['has_default'], + data['init'], + data['converter_name'], + Context(line=data['context_line'], column=data['context_column']) + ) + + +def attr_class_maker_callback(ctx: 'mypy.plugin.ClassDefContext', + auto_attribs_default: bool = False) -> None: + """Add necessary dunder methods to classes decorated with attr.s. + + attrs is a package that lets you define classes without writing dull boilerplate code. + + At a quick glance, the decorator searches the class body for assignments of `attr.ib`s (or + annotated variables if auto_attribs=True), then depending on how the decorator is called, + it will add an __init__ or all the __cmp__ methods. For frozen=True it will turn the attrs + into properties. + + See http://www.attrs.org/en/stable/how-does-it-work.html for information on how attrs works. + """ + info = ctx.cls.info + + init = _get_decorator_bool_argument(ctx, 'init', True) + frozen = _get_decorator_bool_argument(ctx, 'frozen', False) + cmp = _get_decorator_bool_argument(ctx, 'cmp', True) + auto_attribs = _get_decorator_bool_argument(ctx, 'auto_attribs', auto_attribs_default) + + if ctx.api.options.python_version[0] < 3: + if auto_attribs: + ctx.api.fail("auto_attribs is not supported in Python 2", ctx.reason) + return + if not info.defn.base_type_exprs: + # Note: This will not catch subclassing old-style classes. + ctx.api.fail("attrs only works with new-style classes", info.defn) + return + + attributes = _analyze_class(ctx, auto_attribs) + + adder = MethodAdder(info, ctx.api.named_type('__builtins__.function')) + if init: + _add_init(ctx, attributes, adder) + if cmp: + _add_cmp(ctx, adder) + if frozen: + _make_frozen(ctx, attributes) + + +def _analyze_class(ctx: 'mypy.plugin.ClassDefContext', auto_attribs: bool) -> List[Attribute]: + """Analyze the class body of an attr maker, its parents, and return the Attributes found.""" + own_attrs = OrderedDict() # type: OrderedDict[str, Attribute] + # Walk the body looking for assignments and decorators. + for stmt in ctx.cls.defs.body: + if isinstance(stmt, AssignmentStmt): + for attr in _attributes_from_assignment(ctx, stmt, auto_attribs): + # When attrs are defined twice in the same body we want to use the 2nd definition + # in the 2nd location. So remove it from the OrderedDict. + # Unless it's auto_attribs in which case we want the 2nd definition in the + # 1st location. + if not auto_attribs and attr.name in own_attrs: + del own_attrs[attr.name] + own_attrs[attr.name] = attr + elif isinstance(stmt, Decorator): + _cleanup_decorator(stmt, own_attrs) + + # Traverse the MRO and collect attributes from the parents. 
+ taken_attr_names = set(own_attrs) + super_attrs = [] + for super_info in ctx.cls.info.mro[1:-1]: + if 'attrs' in super_info.metadata: + for data in super_info.metadata['attrs']['attributes']: + # Only add an attribute if it hasn't been defined before. This + # allows for overwriting attribute definitions by subclassing. + if data['name'] not in taken_attr_names: + a = Attribute.deserialize(super_info, data) + super_attrs.append(a) + taken_attr_names.add(a.name) + attributes = super_attrs + list(own_attrs.values()) + + # Save the attributes so that subclasses can reuse them. + ctx.cls.info.metadata['attrs'] = {'attributes': [attr.serialize() for attr in attributes]} + + # Check the init args for correct default-ness. Note: This has to be done after all the + # attributes for all classes have been read, because subclasses can override parents. + last_default = False + for attribute in attributes: + if attribute.init and not attribute.has_default and last_default: + ctx.api.fail( + "Non-default attributes not allowed after default attributes.", + attribute.context) + last_default = attribute.has_default + + return attributes + + +def _attributes_from_assignment(ctx: 'mypy.plugin.ClassDefContext', + stmt: AssignmentStmt, auto_attribs: bool) -> Iterable[Attribute]: + """Return Attribute objects that are created by this assignment. + + The assignments can look like this: + x = attr.ib() + x = y = attr.ib() + x, y = attr.ib(), attr.ib() + or if auto_attribs is enabled also like this: + x: type + x: type = default_value + """ + for lvalue in stmt.lvalues: + lvalues, rvalues = _parse_assignments(lvalue, stmt) + + if len(lvalues) != len(rvalues): + # This means we have some assignment that isn't 1 to 1. + # It can't be an attrib. + continue + + for lhs, rvalue in zip(lvalues, rvalues): + # Check if the right hand side is a call to an attribute maker. + if (isinstance(rvalue, CallExpr) + and isinstance(rvalue.callee, RefExpr) + and rvalue.callee.fullname in attr_attrib_makers): + attr = _attribute_from_attrib_maker(ctx, auto_attribs, lhs, rvalue, stmt) + if attr: + yield attr + elif auto_attribs and stmt.type and stmt.new_syntax and not is_class_var(lhs): + yield _attribute_from_auto_attrib(ctx, lhs, rvalue, stmt) + + +def _cleanup_decorator(stmt: Decorator, attr_map: Dict[str, Attribute]) -> None: + """Handle decorators in class bodies. + + `x.default` will set a default value on x + `x.validator` and `x.default` will get removed to avoid throwing a type error. + """ + remove_me = [] + for func_decorator in stmt.decorators: + if (isinstance(func_decorator, MemberExpr) + and isinstance(func_decorator.expr, NameExpr) + and func_decorator.expr.name in attr_map): + + if func_decorator.name == 'default': + attr_map[func_decorator.expr.name].has_default = True + + if func_decorator.name in ('default', 'validator'): + # These are decorators on the attrib object that only exist during + # class creation time. In order to not trigger a type error later we + # just remove them. This might leave us with a Decorator with no + # decorators (Emperor's new clothes?) + # TODO: It would be nice to type-check these rather than remove them. + # default should be Callable[[], T] + # validator should be Callable[[Any, 'Attribute', T], Any] + # where T is the type of the attribute. 
+ remove_me.append(func_decorator) + for dec in remove_me: + stmt.decorators.remove(dec) + + +def _attribute_from_auto_attrib(ctx: 'mypy.plugin.ClassDefContext', + lhs: NameExpr, + rvalue: Expression, + stmt: AssignmentStmt) -> Attribute: + """Return an Attribute for a new type assignment.""" + # `x: int` (without equal sign) assigns rvalue to TempNode(AnyType()) + has_rhs = not isinstance(rvalue, TempNode) + return Attribute(lhs.name, ctx.cls.info, has_rhs, True, None, stmt) + + +def _attribute_from_attrib_maker(ctx: 'mypy.plugin.ClassDefContext', + auto_attribs: bool, + lhs: NameExpr, + rvalue: CallExpr, + stmt: AssignmentStmt) -> Optional[Attribute]: + """Return an Attribute from the assignment or None if you can't make one.""" + if auto_attribs and not stmt.new_syntax: + # auto_attribs requires an annotation on *every* attr.ib. + assert lhs.node is not None + ctx.api.msg.need_annotation_for_var(lhs.node, stmt) + return None + + if len(stmt.lvalues) > 1: + ctx.api.fail("Too many names for one attribute", stmt) + return None + + # This is the type that belongs in the __init__ method for this attrib. + init_type = stmt.type + + # Read all the arguments from the call. + init = _get_bool_argument(ctx, rvalue, 'init', True) + # TODO: Check for attr.NOTHING + attr_has_default = bool(_get_argument(rvalue, 'default')) + + # If the type isn't set through annotation but is passed through `type=` use that. + type_arg = _get_argument(rvalue, 'type') + if type_arg and not init_type: + try: + un_type = expr_to_unanalyzed_type(type_arg) + except TypeTranslationError: + ctx.api.fail('Invalid argument to type', type_arg) + else: + init_type = ctx.api.anal_type(un_type) + if init_type and isinstance(lhs.node, Var) and not lhs.node.type: + # If there is no annotation, add one. + lhs.node.type = init_type + lhs.is_inferred_def = False + + # Note: convert is deprecated but works the same as converter. + converter = _get_argument(rvalue, 'converter') + convert = _get_argument(rvalue, 'convert') + if convert and converter: + ctx.api.fail("Can't pass both `convert` and `converter`.", rvalue) + elif convert: + ctx.api.fail("convert is deprecated, use converter", rvalue) + converter = convert + converter_name = _get_converter_name(converter) + + return Attribute(lhs.name, ctx.cls.info, attr_has_default, init, converter_name, stmt) + + +def _get_converter_name(converter: Optional[Expression]) -> Optional[str]: + """Return the full name of the converter if it exists and is a simple function.""" + # TODO: Support complex converters, e.g. lambdas, calls, etc. 
+ if (converter + and isinstance(converter, RefExpr) + and converter.node + and isinstance(converter.node, FuncBase) + and converter.node.type + and isinstance(converter.node.type, CallableType) + and converter.node.type.arg_types): + return converter.node.fullname() + return None + + +def _parse_assignments( + lvalue: Expression, + stmt: AssignmentStmt) -> Tuple[List[NameExpr], List[Expression]]: + """Convert a possibly complex assignment expression into lists of lvalues and rvalues.""" + lvalues = [] # type: List[NameExpr] + rvalues = [] # type: List[Expression] + if isinstance(lvalue, (TupleExpr, ListExpr)): + if all(isinstance(item, NameExpr) for item in lvalue.items): + lvalues = cast(List[NameExpr], lvalue.items) + if isinstance(stmt.rvalue, (TupleExpr, ListExpr)): + rvalues = stmt.rvalue.items + elif isinstance(lvalue, NameExpr): + lvalues = [lvalue] + rvalues = [stmt.rvalue] + return lvalues, rvalues + + +def _add_cmp(ctx: 'mypy.plugin.ClassDefContext', adder: 'MethodAdder') -> None: + """Generate all the cmp methods for this class.""" + # For __ne__ and __eq__ the type is: + # def __ne__(self, other: object) -> bool + bool_type = ctx.api.named_type('__builtins__.bool') + object_type = ctx.api.named_type('__builtins__.object') + args = [Argument(Var('other', object_type), object_type, None, ARG_POS)] + for method in ['__ne__', '__eq__']: + adder.add_method(method, args, bool_type) + # For the rest we use: + # AT = TypeVar('AT') + # def __lt__(self: AT, other: AT) -> bool + # This way comparisons with subclasses will work correctly. + tvd = TypeVarDef('AT', 'AT', 1, [], object_type) + tvd_type = TypeVarType(tvd) + args = [Argument(Var('other', tvd_type), tvd_type, None, ARG_POS)] + for method in ['__lt__', '__le__', '__gt__', '__ge__']: + adder.add_method(method, args, bool_type, self_type=tvd_type, tvd=tvd) + + +def _make_frozen(ctx: 'mypy.plugin.ClassDefContext', attributes: List[Attribute]) -> None: + """Turn all the attributes into properties to simulate frozen classes.""" + # TODO: Handle subclasses of frozen classes. + for attribute in attributes: + node = ctx.cls.info.names[attribute.name].node + assert isinstance(node, Var) + node.is_initialized_in_class = False + node.is_property = True + + +def _add_init(ctx: 'mypy.plugin.ClassDefContext', attributes: List[Attribute], + adder: 'MethodAdder') -> None: + """Generate an __init__ method for the attributes and add it to the class.""" + adder.add_method( + '__init__', + [attribute.argument(ctx) for attribute in attributes if attribute.init], + NoneTyp() + ) + for stmt in ctx.cls.defs.body: + # The type of classmethods will be wrong because it's based on the parent's __init__. + # Set it correctly. + if isinstance(stmt, Decorator) and stmt.func.is_class: + func_type = stmt.func.type + if isinstance(func_type, CallableType): + func_type.arg_types[0] = ctx.api.class_type(ctx.cls.info) + + +def _get_decorator_bool_argument( + ctx: 'mypy.plugin.ClassDefContext', + name: str, + default: bool) -> bool: + """Return the bool argument for the decorator. + + This handles both @attr.s(...) 
and @attr.s
+    """
+    if isinstance(ctx.reason, CallExpr):
+        return _get_bool_argument(ctx, ctx.reason, name, default)
+    else:
+        return default
+
+
+def _get_bool_argument(ctx: 'mypy.plugin.ClassDefContext', expr: CallExpr,
+                       name: str, default: bool) -> bool:
+    """Return the boolean value for an argument to a call or the default if it's not found."""
+    attr_value = _get_argument(expr, name)
+    if attr_value:
+        ret = ctx.api.parse_bool(attr_value)
+        if ret is None:
+            ctx.api.fail('"{}" argument must be True or False.'.format(name), expr)
+            return default
+        return ret
+    return default
+
+
+def _get_argument(call: CallExpr, name: str) -> Optional[Expression]:
+    """Return the expression for the specific argument."""
+    # To do this we use the CallableType of the callee to find the FormalArgument,
+    # then walk the actual CallExpr looking for the appropriate argument.
+    #
+    # Note: I'm not hard-coding the index so that in the future we can support other
+    # attrib and class makers.
+    callee_type = None
+    if (isinstance(call.callee, RefExpr)
+            and isinstance(call.callee.node, (Var, FuncBase))
+            and call.callee.node.type):
+        callee_node_type = call.callee.node.type
+        if isinstance(callee_node_type, Overloaded):
+            # We take the last overload.
+            callee_type = callee_node_type.items()[-1]
+        elif isinstance(callee_node_type, CallableType):
+            callee_type = callee_node_type
+
+    if not callee_type:
+        return None
+
+    argument = callee_type.argument_by_name(name)
+    if not argument:
+        return None
+    assert argument.name
+
+    for i, (attr_name, attr_value) in enumerate(zip(call.arg_names, call.args)):
+        if argument.pos is not None and not attr_name and i == argument.pos:
+            return attr_value
+        if attr_name == argument.name:
+            return attr_value
+    return None
+
+
+class MethodAdder:
+    """Helper to add methods to a TypeInfo.
+
+    info: The TypeInfo on which we will add methods.
+    function_type: The type of __builtins__.function that will be used as the
+                   fallback for all methods added.
+    """
+
+    # TODO: Combine this with the code build_namedtuple_typeinfo to support both.
+
+    def __init__(self, info: TypeInfo, function_type: Instance) -> None:
+        self.info = info
+        self.self_type = fill_typevars(info)
+        self.function_type = function_type
+
+    def add_method(self,
+                   method_name: str, args: List[Argument], ret_type: Type,
+                   self_type: Optional[Type] = None,
+                   tvd: Optional[TypeVarDef] = None) -> None:
+        """Add a method: def <method_name>(self, <args>) -> <ret_type>: ... to info.
+
+        self_type: The type to use for the self argument or None to use the inferred self type.
+        tvd: If the method is generic these should be the type variables.
+        """
+        from mypy.semanal import set_callable_name
+        self_type = self_type if self_type is not None else self.self_type
+        args = [Argument(Var('self'), self_type, None, ARG_POS)] + args
+        arg_types = [arg.type_annotation for arg in args]
+        arg_names = [arg.variable.name() for arg in args]
+        arg_kinds = [arg.kind for arg in args]
+        assert None not in arg_types
+        signature = CallableType(cast(List[Type], arg_types), arg_kinds, arg_names,
+                                 ret_type, self.function_type)
+        if tvd:
+            signature.variables = [tvd]
+        func = FuncDef(method_name, args, Block([PassStmt()]))
+        func.info = self.info
+        func.type = set_callable_name(signature, func)
+        func._fullname = self.info.fullname() + '.' + method_name
+        func.line = self.info.line
+        self.info.names[method_name] = SymbolTableNode(MDEF, func)
+        # Add the created methods to the body so that they can get further semantic analysis.
+        # e.g. Forward Reference Resolution.
+ self.info.defn.defs.body.append(func) diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index ddcf9de53320..b652e9b21287 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -74,6 +74,7 @@ 'check-incomplete-fixture.test', 'check-custom-plugin.test', 'check-default-plugin.test', + 'check-attr.test', ] diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-attr.test new file mode 100644 index 000000000000..7062115876ec --- /dev/null +++ b/test-data/unit/check-attr.test @@ -0,0 +1,692 @@ +[case testAttrsSimple] +import attr +@attr.s +class A: + a = attr.ib() + _b = attr.ib() + c = attr.ib(18) + _d = attr.ib(validator=None, default=18) + E = 18 + + def foo(self): + return self.a +reveal_type(A) # E: Revealed type is 'def (a: Any, b: Any, c: Any =, d: Any =) -> __main__.A' +A(1, [2]) +A(1, [2], '3', 4) +A(1, 2, 3, 4) +A(1, [2], '3', 4, 5) # E: Too many arguments for "A" +[builtins fixtures/list.pyi] + +[case testAttrsAnnotated] +import attr +from typing import List, ClassVar +@attr.s +class A: + a: int = attr.ib() + _b: List[int] = attr.ib() + c: str = attr.ib('18') + _d: int = attr.ib(validator=None, default=18) + E = 7 + F: ClassVar[int] = 22 +reveal_type(A) # E: Revealed type is 'def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> __main__.A' +A(1, [2]) +A(1, [2], '3', 4) +A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str" +A(1, [2], '3', 4, 5) # E: Too many arguments for "A" +[builtins fixtures/list.pyi] + +[case testAttrsPython2Annotations] +import attr +from typing import List, ClassVar +@attr.s +class A: + a = attr.ib() # type: int + _b = attr.ib() # type: List[int] + c = attr.ib('18') # type: str + _d = attr.ib(validator=None, default=18) # type: int + E = 7 + F: ClassVar[int] = 22 +reveal_type(A) # E: Revealed type is 'def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> __main__.A' +A(1, [2]) +A(1, [2], '3', 4) +A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str" +A(1, [2], '3', 4, 5) # E: Too many arguments for "A" +[builtins fixtures/list.pyi] + +[case testAttrsAutoAttribs] +import attr +from typing import List, ClassVar +@attr.s(auto_attribs=True) +class A: + a: int + _b: List[int] + c: str = '18' + _d: int = attr.ib(validator=None, default=18) + E = 7 + F: ClassVar[int] = 22 +reveal_type(A) # E: Revealed type is 'def (a: builtins.int, b: builtins.list[builtins.int], c: builtins.str =, d: builtins.int =) -> __main__.A' +A(1, [2]) +A(1, [2], '3', 4) +A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" # E: Argument 3 to "A" has incompatible type "int"; expected "str" +A(1, [2], '3', 4, 5) # E: Too many arguments for "A" +[builtins fixtures/list.pyi] + +[case testAttrsUntypedNoUntypedDefs] +# flags: --disallow-untyped-defs +import attr +@attr.s # E: Function is missing a type annotation for one or more arguments +class A: + a = attr.ib() # E: Need type annotation for 'a' + _b = attr.ib() # E: Need type annotation for '_b' + c = attr.ib(18) # E: Need type annotation for 'c' + _d = attr.ib(validator=None, default=18) # E: Need type annotation for '_d' + E = 18 +[builtins fixtures/bool.pyi] + +[case testAttrsWrongReturnValue] +import attr +@attr.s +class A: + x: int = attr.ib(8) + def foo(self) -> str: + return self.x # E: 
Incompatible return value type (got "int", expected "str") +@attr.s +class B: + x = attr.ib(8) # type: int + def foo(self) -> str: + return self.x # E: Incompatible return value type (got "int", expected "str") +@attr.dataclass +class C: + x: int = 8 + def foo(self) -> str: + return self.x # E: Incompatible return value type (got "int", expected "str") +@attr.s +class D: + x = attr.ib(8, type=int) + def foo(self) -> str: + return self.x # E: Incompatible return value type (got "int", expected "str") +[builtins fixtures/bool.pyi] + +[case testAttrsSeriousNames] +from attr import attrib, attrs +from typing import List +@attrs(init=True) +class A: + a = attrib() + _b: List[int] = attrib() + c = attrib(18) + _d = attrib(validator=None, default=18) + CLASS_VAR = 18 +reveal_type(A) # E: Revealed type is 'def (a: Any, b: builtins.list[builtins.int], c: Any =, d: Any =) -> __main__.A' +A(1, [2]) +A(1, [2], '3', 4) +A(1, 2, 3, 4) # E: Argument 2 to "A" has incompatible type "int"; expected "List[int]" +A(1, [2], '3', 4, 5) # E: Too many arguments for "A" +[builtins fixtures/list.pyi] + +[case testAttrsDefaultErrors] +import attr +@attr.s +class A: + x = attr.ib(default=17) + y = attr.ib() # E: Non-default attributes not allowed after default attributes. +@attr.s(auto_attribs=True) +class B: + x: int = 17 + y: int # E: Non-default attributes not allowed after default attributes. +@attr.s(auto_attribs=True) +class C: + x: int = attr.ib(default=17) + y: int # E: Non-default attributes not allowed after default attributes. +@attr.s +class D: + x = attr.ib() + y = attr.ib() # E: Non-default attributes not allowed after default attributes. + + @x.default + def foo(self): + return 17 +[builtins fixtures/bool.pyi] + +[case testAttrsNotBooleans] +import attr +x = True +@attr.s(cmp=x) # E: "cmp" argument must be True or False. +class A: + a = attr.ib(init=x) # E: "init" argument must be True or False. 
+[builtins fixtures/bool.pyi] + +[case testAttrsInitFalse] +from attr import attrib, attrs +@attrs(auto_attribs=True, init=False) +class A: + a: int + _b: int + c: int = 18 + _d: int = attrib(validator=None, default=18) +reveal_type(A) # E: Revealed type is 'def () -> __main__.A' +A() +A(1, [2]) # E: Too many arguments for "A" +A(1, [2], '3', 4) # E: Too many arguments for "A" +[builtins fixtures/list.pyi] + +[case testAttrsInitAttribFalse] +from attr import attrib, attrs +@attrs +class A: + a = attrib(init=False) + b = attrib() +reveal_type(A) # E: Revealed type is 'def (b: Any) -> __main__.A' +[builtins fixtures/bool.pyi] + +[case testAttrsCmpTrue] +from attr import attrib, attrs +@attrs(auto_attribs=True) +class A: + a: int +reveal_type(A) # E: Revealed type is 'def (a: builtins.int) -> __main__.A' +reveal_type(A.__eq__) # E: Revealed type is 'def (self: __main__.A, other: builtins.object) -> builtins.bool' +reveal_type(A.__ne__) # E: Revealed type is 'def (self: __main__.A, other: builtins.object) -> builtins.bool' +reveal_type(A.__lt__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +reveal_type(A.__le__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +reveal_type(A.__gt__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +reveal_type(A.__ge__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' + +A(1) < A(2) +A(1) <= A(2) +A(1) > A(2) +A(1) >= A(2) +A(1) == A(2) +A(1) != A(2) + +A(1) < 1 # E: Unsupported operand types for < ("A" and "int") +A(1) <= 1 # E: Unsupported operand types for <= ("A" and "int") +A(1) > 1 # E: Unsupported operand types for > ("A" and "int") +A(1) >= 1 # E: Unsupported operand types for >= ("A" and "int") +A(1) == 1 +A(1) != 1 + +1 < A(1) # E: Unsupported operand types for > ("A" and "int") +1 <= A(1) # E: Unsupported operand types for >= ("A" and "int") +1 > A(1) # E: Unsupported operand types for < ("A" and "int") +1 >= A(1) # E: Unsupported operand types for <= ("A" and "int") +1 == A(1) +1 != A(1) +[builtins fixtures/attr.pyi] + +[case testAttrsCmpFalse] +from attr import attrib, attrs +@attrs(auto_attribs=True, cmp=False) +class A: + a: int +reveal_type(A) # E: Revealed type is 'def (a: builtins.int) -> __main__.A' +reveal_type(A.__eq__) # E: Revealed type is 'def (builtins.object, builtins.object) -> builtins.bool' +reveal_type(A.__ne__) # E: Revealed type is 'def (builtins.object, builtins.object) -> builtins.bool' + +A(1) < A(2) # E: Unsupported left operand type for < ("A") +A(1) <= A(2) # E: Unsupported left operand type for <= ("A") +A(1) > A(2) # E: Unsupported left operand type for > ("A") +A(1) >= A(2) # E: Unsupported left operand type for >= ("A") +A(1) == A(2) +A(1) != A(2) + +A(1) < 1 # E: Unsupported left operand type for < ("A") +A(1) <= 1 # E: Unsupported left operand type for <= ("A") +A(1) > 1 # E: Unsupported left operand type for > ("A") +A(1) >= 1 # E: Unsupported left operand type for >= ("A") +A(1) == 1 +A(1) != 1 + +1 < A(1) # E: Unsupported left operand type for < ("int") +1 <= A(1) # E: Unsupported left operand type for <= ("int") +1 > A(1) # E: Unsupported left operand type for > ("int") +1 >= A(1) # E: Unsupported left operand type for >= ("int") +1 == A(1) +1 != A(1) +[builtins fixtures/attr.pyi] + +[case testAttrsInheritance] +import attr +@attr.s +class A: + a: int = attr.ib() +@attr.s +class B: + b: str = attr.ib() +@attr.s +class C(A, B): + c: bool = attr.ib() +reveal_type(C) # E: Revealed type is 'def (a: 
builtins.int, b: builtins.str, c: builtins.bool) -> __main__.C' +[builtins fixtures/bool.pyi] + +[case testAttrsNestedInClasses] +import attr +@attr.s +class C: + y = attr.ib() + @attr.s + class D: + x: int = attr.ib() +reveal_type(C) # E: Revealed type is 'def (y: Any) -> __main__.C' +reveal_type(C.D) # E: Revealed type is 'def (x: builtins.int) -> __main__.C.D' +[builtins fixtures/bool.pyi] + +[case testAttrsInheritanceOverride] +import attr + +@attr.s +class A: + a: int = attr.ib() + x: int = attr.ib() + +@attr.s +class B(A): + b: str = attr.ib() + x: int = attr.ib(default=22) + +@attr.s +class C(B): + c: bool = attr.ib() # No error here because the x below overwrites the x above. + x: int = attr.ib() + +reveal_type(A) # E: Revealed type is 'def (a: builtins.int, x: builtins.int) -> __main__.A' +reveal_type(B) # E: Revealed type is 'def (a: builtins.int, b: builtins.str, x: builtins.int =) -> __main__.B' +reveal_type(C) # E: Revealed type is 'def (a: builtins.int, b: builtins.str, c: builtins.bool, x: builtins.int) -> __main__.C' +[builtins fixtures/bool.pyi] + +[case testAttrsTypeEquals] +import attr + +@attr.s +class A: + a = attr.ib(type=int) + b = attr.ib(18, type=int) +reveal_type(A) # E: Revealed type is 'def (a: builtins.int, b: builtins.int =) -> __main__.A' +[builtins fixtures/bool.pyi] + +[case testAttrsFrozen] +import attr + +@attr.s(frozen=True) +class A: + a = attr.ib() + +a = A(5) +a.a = 16 # E: Property "a" defined in "A" is read-only +[builtins fixtures/bool.pyi] + +[case testAttrsDataClass] +import attr +from typing import List, ClassVar +@attr.dataclass +class A: + a: int + _b: List[str] + c: str = '18' + _d: int = attr.ib(validator=None, default=18) + E = 7 + F: ClassVar[int] = 22 +reveal_type(A) # E: Revealed type is 'def (a: builtins.int, b: builtins.list[builtins.str], c: builtins.str =, d: builtins.int =) -> __main__.A' +A(1, ['2']) +[builtins fixtures/list.pyi] + +[case testAttrsTypeAlias] +from typing import List +import attr +Alias = List[int] +@attr.s(auto_attribs=True) +class A: + Alias2 = List[str] + x: Alias + y: Alias2 = attr.ib() +reveal_type(A) # E: Revealed type is 'def (x: builtins.list[builtins.int], y: builtins.list[builtins.str]) -> __main__.A' +[builtins fixtures/list.pyi] + +[case testAttrsGeneric] +from typing import TypeVar, Generic, List +import attr +T = TypeVar('T') +@attr.s(auto_attribs=True) +class A(Generic[T]): + x: List[T] + y: T = attr.ib() + def foo(self) -> List[T]: + return [self.y] + def bar(self) -> T: + return self.x[0] + def problem(self) -> T: + return self.x # E: Incompatible return value type (got "List[T]", expected "T") +reveal_type(A) # E: Revealed type is 'def [T] (x: builtins.list[T`1], y: T`1) -> __main__.A[T`1]' +a = A([1], 2) +reveal_type(a) # E: Revealed type is '__main__.A[builtins.int*]' +reveal_type(a.x) # E: Revealed type is 'builtins.list[builtins.int*]' +reveal_type(a.y) # E: Revealed type is 'builtins.int*' + +A(['str'], 7) # E: Cannot infer type argument 1 of "A" +A([1], '2') # E: Cannot infer type argument 1 of "A" + +[builtins fixtures/list.pyi] + +[case testAttrsForwardReference] +import attr +@attr.s(auto_attribs=True) +class A: + parent: 'B' + +@attr.s(auto_attribs=True) +class B: + parent: A + +reveal_type(A) # E: Revealed type is 'def (parent: __main__.B) -> __main__.A' +reveal_type(B) # E: Revealed type is 'def (parent: __main__.A) -> __main__.B' +A(B(None)) +[builtins fixtures/list.pyi] + +[case testAttrsForwardReferenceInClass] +import attr +@attr.s(auto_attribs=True) +class A: + parent: A.B + + 
@attr.s(auto_attribs=True) + class B: + parent: A + +reveal_type(A) # E: Revealed type is 'def (parent: __main__.A.B) -> __main__.A' +reveal_type(A.B) # E: Revealed type is 'def (parent: __main__.A) -> __main__.A.B' +A(A.B(None)) +[builtins fixtures/list.pyi] + +[case testAttrsImporting] +from helper import A +reveal_type(A) # E: Revealed type is 'def (a: builtins.int, b: builtins.str) -> helper.A' +[file helper.py] +import attr +@attr.s(auto_attribs=True) +class A: + a: int + b: str = attr.ib() +[builtins fixtures/list.pyi] + +[case testAttrsOtherMethods] +import attr +@attr.s(auto_attribs=True) +class A: + a: int + b: str = attr.ib() + @classmethod + def new(cls) -> A: + reveal_type(cls) # E: Revealed type is 'def (a: builtins.int, b: builtins.str) -> __main__.A' + return cls(6, 'hello') + @classmethod + def bad(cls) -> A: + return cls(17) # E: Too few arguments for "A" + def foo(self) -> int: + return self.a +reveal_type(A) # E: Revealed type is 'def (a: builtins.int, b: builtins.str) -> __main__.A' +a = A.new() +reveal_type(a.foo) # E: Revealed type is 'def () -> builtins.int' +[builtins fixtures/classmethod.pyi] + +[case testAttrsDefaultDecorator] +import attr +@attr.s +class C(object): + x: int = attr.ib(default=1) + y: int = attr.ib() + @y.default + def name_does_not_matter(self): + return self.x + 1 +C() +[builtins fixtures/list.pyi] + +[case testAttrsValidatorDecorator] +import attr +@attr.s +class C(object): + x = attr.ib() + @x.validator + def check(self, attribute, value): + if value > 42: + raise ValueError("x must be smaller or equal to 42") +C(42) +C(43) +[builtins fixtures/exception.pyi] + +[case testAttrsLocalVariablesInClassMethod] +import attr +@attr.s(auto_attribs=True) +class A: + a: int + b: int = attr.ib() + @classmethod + def new(cls, foo: int) -> A: + a = foo + b = a + return cls(a, b) +[builtins fixtures/classmethod.pyi] + +[case testAttrsUnionForward] +import attr +from typing import Union, List + +@attr.s(auto_attribs=True) +class A: + frob: List['AOrB'] + +class B: + pass + +AOrB = Union[A, B] + +reveal_type(A) # E: Revealed type is 'def (frob: builtins.list[Union[__main__.A, __main__.B]]) -> __main__.A' +reveal_type(B) # E: Revealed type is 'def () -> __main__.B' + +A([B()]) +[builtins fixtures/list.pyi] + +[case testAttrsUsingConvert] +import attr + +def convert(s:int) -> str: + return 'hello' + +@attr.s +class C: + x: str = attr.ib(convert=convert) # E: convert is deprecated, use converter + +# Because of the convert the __init__ takes an int, but the variable is a str. +reveal_type(C) # E: Revealed type is 'def (x: builtins.int) -> __main__.C' +reveal_type(C(15).x) # E: Revealed type is 'builtins.str' +[builtins fixtures/list.pyi] + +[case testAttrsUsingConverter] +import attr +import helper + +def converter2(s:int) -> str: + return 'hello' + +@attr.s +class C: + x: str = attr.ib(converter=helper.converter) + y: str = attr.ib(converter=converter2) + +# Because of the converter the __init__ takes an int, but the variable is a str. +reveal_type(C) # E: Revealed type is 'def (x: builtins.int, y: builtins.int) -> __main__.C' +reveal_type(C(15, 16).x) # E: Revealed type is 'builtins.str' +[file helper.py] +def converter(s:int) -> str: + return 'hello' +[builtins fixtures/list.pyi] + +[case testAttrsUsingConvertAndConverter] +import attr + +def converter(s:int) -> str: + return 'hello' + +@attr.s +class C: + x: str = attr.ib(converter=converter, convert=converter) # E: Can't pass both `convert` and `converter`. 
+ +[builtins fixtures/list.pyi] + + +[case testAttrsUsingConverterAndSubclass] +import attr + +def converter(s:int) -> str: + return 'hello' + +@attr.s +class C: + x: str = attr.ib(converter=converter) + +@attr.s +class A(C): + pass + +# Because of the convert the __init__ takes an int, but the variable is a str. +reveal_type(A) # E: Revealed type is 'def (x: builtins.int) -> __main__.A' +reveal_type(A(15).x) # E: Revealed type is 'builtins.str' +[builtins fixtures/list.pyi] + +[case testAttrsCmpWithSubclasses] +import attr +@attr.s +class A: pass +@attr.s +class B: pass +@attr.s +class C(A, B): pass +@attr.s +class D(A): pass + +reveal_type(A.__lt__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +reveal_type(B.__lt__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +reveal_type(C.__lt__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +reveal_type(D.__lt__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' + +A() < A() +B() < B() +A() < B() # E: Unsupported operand types for > ("B" and "A") + +C() > A() +C() > B() +C() > C() +C() > D() # E: Unsupported operand types for < ("D" and "C") + +D() >= A() +D() >= B() # E: Unsupported operand types for <= ("B" and "D") +D() >= C() # E: Unsupported operand types for <= ("C" and "D") +D() >= D() + +A() <= 1 # E: Unsupported operand types for <= ("A" and "int") +B() <= 1 # E: Unsupported operand types for <= ("B" and "int") +C() <= 1 # E: Unsupported operand types for <= ("C" and "int") +D() <= 1 # E: Unsupported operand types for <= ("D" and "int") + +A() == A() +B() == A() +C() == A() +D() == A() + +A() == int +B() == int +C() == int +D() == int +[builtins fixtures/list.pyi] + +[case testAttrsComplexSuperclass] +import attr +@attr.s +class C: + x: int = attr.ib(default=1) + y: int = attr.ib() + @y.default + def name_does_not_matter(self): + return self.x + 1 +@attr.s +class A(C): + z: int = attr.ib(default=18) +reveal_type(C) # E: Revealed type is 'def (x: builtins.int =, y: builtins.int =) -> __main__.C' +reveal_type(A) # E: Revealed type is 'def (x: builtins.int =, y: builtins.int =, z: builtins.int =) -> __main__.A' +[builtins fixtures/list.pyi] + +[case testAttrsMultiAssign] +import attr +@attr.s +class A: + x, y, z = attr.ib(), attr.ib(type=int), attr.ib(default=17) +reveal_type(A) # E: Revealed type is 'def (x: Any, y: builtins.int, z: Any =) -> __main__.A' +[builtins fixtures/list.pyi] + +[case testAttrsMultiAssign2] +import attr +@attr.s +class A: + x = y = z = attr.ib() # E: Too many names for one attribute +[builtins fixtures/list.pyi] + +[case testAttrsPrivateInit] +import attr +@attr.s +class C(object): + _x = attr.ib(init=False, default=42) +C() +C(_x=42) # E: Unexpected keyword argument "_x" for "C" +[builtins fixtures/list.pyi] + +[case testAttrsAutoMustBeAll] +import attr +@attr.s(auto_attribs=True) +class A: + a: int + b = 17 + # The following forms are not allowed with auto_attribs=True + c = attr.ib() # E: Need type annotation for 'c' + d, e = attr.ib(), attr.ib() # E: Need type annotation for 'd' # E: Need type annotation for 'e' + f = g = attr.ib() # E: Need type annotation for 'f' # E: Need type annotation for 'g' +[builtins fixtures/bool.pyi] + +[case testAttrsRepeatedName] +import attr +@attr.s +class A: + a = attr.ib(default=8) + b = attr.ib() + a = attr.ib() +reveal_type(A) # E: Revealed type is 'def (b: Any, a: Any) -> __main__.A' +@attr.s +class B: + a: int = attr.ib(default=8) + b: int = attr.ib() + a: int = 
attr.ib() # E: Name 'a' already defined +reveal_type(B) # E: Revealed type is 'def (b: builtins.int, a: builtins.int) -> __main__.B' +@attr.s(auto_attribs=True) +class C: + a: int = 8 + b: int + a: int = attr.ib() # E: Name 'a' already defined +reveal_type(C) # E: Revealed type is 'def (a: builtins.int, b: builtins.int) -> __main__.C' +[builtins fixtures/bool.pyi] + +[case testAttrsNewStyleClassPy2] +# flags: --py2 +import attr +@attr.s +class Good(object): + pass +@attr.s # E: attrs only works with new-style classes +class Bad: + pass +[builtins_py2 fixtures/bool.pyi] + +[case testAttrsAutoAttribsPy2] +# flags: --py2 +import attr +@attr.s(auto_attribs=True) # E: auto_attribs is not supported in Python 2 +class A(object): + x = attr.ib() +[builtins_py2 fixtures/bool.pyi] diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index ebd17c2b3411..60537eeef2ba 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -3467,6 +3467,299 @@ tmp/main.py:2: error: Expression has type "Any" [out2] tmp/main.py:2: error: Expression has type "Any" +[case testAttrsIncrementalSubclassingCached] +from a import A +import attr +@attr.s(auto_attribs=True) +class B(A): + e: str = 'e' +a = B(5, [5], 'foo') +a.a = 6 +a._b = [2] +a.c = 'yo' +a._d = 22 +a.e = 'hi' + +[file a.py] +import attr +import attr +from typing import List, ClassVar +@attr.s(auto_attribs=True) +class A: + a: int + _b: List[int] + c: str = '18' + _d: int = attr.ib(validator=None, default=18) + E = 7 + F: ClassVar[int] = 22 + +[builtins fixtures/list.pyi] +[out1] +[out2] + +[case testAttrsIncrementalSubclassingCachedConverter] +from a import A +import attr +@attr.s +class B(A): + pass +reveal_type(B) + +[file a.py] +def converter(s:int) -> str: + return 'hello' + +import attr +@attr.s +class A: + x: str = attr.ib(converter=converter) + +[builtins fixtures/list.pyi] +[out1] +main:6: error: Revealed type is 'def (x: builtins.int) -> __main__.B' + +[out2] +main:6: error: Revealed type is 'def (x: builtins.int) -> __main__.B' + +[case testAttrsIncrementalSubclassingCachedType] +from a import A +import attr +@attr.s +class B(A): + pass +reveal_type(B) + +[file a.py] +import attr +@attr.s +class A: + x = attr.ib(type=int) + +[builtins fixtures/list.pyi] +[out1] +main:6: error: Revealed type is 'def (x: builtins.int) -> __main__.B' +[out2] +main:6: error: Revealed type is 'def (x: builtins.int) -> __main__.B' + +[case testAttrsIncrementalArguments] +from a import Frozen, NoInit, NoCmp +f = Frozen(5) +f.x = 6 + +g = NoInit() + +Frozen(1) < Frozen(2) +Frozen(1) <= Frozen(2) +Frozen(1) > Frozen(2) +Frozen(1) >= Frozen(2) + +NoCmp(1) < NoCmp(2) +NoCmp(1) <= NoCmp(2) +NoCmp(1) > NoCmp(2) +NoCmp(1) >= NoCmp(2) + +[file a.py] +import attr +@attr.s(frozen=True) +class Frozen: + x: int = attr.ib() +@attr.s(init=False) +class NoInit: + x: int = attr.ib() +@attr.s(cmp=False) +class NoCmp: + x: int = attr.ib() + +[builtins fixtures/list.pyi] +[rechecked] +[stale] +[out1] +main:3: error: Property "x" defined in "Frozen" is read-only +main:12: error: Unsupported left operand type for < ("NoCmp") +main:13: error: Unsupported left operand type for <= ("NoCmp") +main:14: error: Unsupported left operand type for > ("NoCmp") +main:15: error: Unsupported left operand type for >= ("NoCmp") + +[out2] +main:3: error: Property "x" defined in "Frozen" is read-only +main:12: error: Unsupported left operand type for < ("NoCmp") +main:13: error: Unsupported left operand type for <= ("NoCmp") +main:14: 
error: Unsupported left operand type for > ("NoCmp") +main:15: error: Unsupported left operand type for >= ("NoCmp") + +[case testAttrsIncrementalDunder] +from a import A +reveal_type(A) # E: Revealed type is 'def (a: builtins.int) -> a.A' +reveal_type(A.__eq__) # E: Revealed type is 'def (self: a.A, other: builtins.object) -> builtins.bool' +reveal_type(A.__ne__) # E: Revealed type is 'def (self: a.A, other: builtins.object) -> builtins.bool' +reveal_type(A.__lt__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +reveal_type(A.__le__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +reveal_type(A.__gt__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +reveal_type(A.__ge__) # E: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' + +A(1) < A(2) +A(1) <= A(2) +A(1) > A(2) +A(1) >= A(2) +A(1) == A(2) +A(1) != A(2) + +A(1) < 1 # E: Unsupported operand types for < ("A" and "int") +A(1) <= 1 # E: Unsupported operand types for <= ("A" and "int") +A(1) > 1 # E: Unsupported operand types for > ("A" and "int") +A(1) >= 1 # E: Unsupported operand types for >= ("A" and "int") +A(1) == 1 +A(1) != 1 + +1 < A(1) # E: Unsupported operand types for > ("A" and "int") +1 <= A(1) # E: Unsupported operand types for >= ("A" and "int") +1 > A(1) # E: Unsupported operand types for < ("A" and "int") +1 >= A(1) # E: Unsupported operand types for <= ("A" and "int") +1 == A(1) +1 != A(1) + +[file a.py] +from attr import attrib, attrs +@attrs(auto_attribs=True) +class A: + a: int + +[builtins fixtures/attr.pyi] +[rechecked] +[stale] +[out2] +main:2: error: Revealed type is 'def (a: builtins.int) -> a.A' +main:3: error: Revealed type is 'def (self: a.A, other: builtins.object) -> builtins.bool' +main:4: error: Revealed type is 'def (self: a.A, other: builtins.object) -> builtins.bool' +main:5: error: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +main:6: error: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +main:7: error: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +main:8: error: Revealed type is 'def [AT] (self: AT`1, other: AT`1) -> builtins.bool' +main:17: error: Unsupported operand types for < ("A" and "int") +main:18: error: Unsupported operand types for <= ("A" and "int") +main:19: error: Unsupported operand types for > ("A" and "int") +main:20: error: Unsupported operand types for >= ("A" and "int") +main:24: error: Unsupported operand types for > ("A" and "int") +main:25: error: Unsupported operand types for >= ("A" and "int") +main:26: error: Unsupported operand types for < ("A" and "int") +main:27: error: Unsupported operand types for <= ("A" and "int") + +[case testAttrsIncrementalSubclassModified] +from b import B +B(5, 'foo') + +[file a.py] +import attr +@attr.s(auto_attribs=True) +class A: + x: int + +[file b.py] +import attr +from a import A +@attr.s(auto_attribs=True) +class B(A): + y: str + +[file b.py.2] +import attr +from a import A +@attr.s(auto_attribs=True) +class B(A): + y: int + +[builtins fixtures/list.pyi] +[out1] +[out2] +main:2: error: Argument 2 to "B" has incompatible type "str"; expected "int" +[rechecked b] + +[case testAttrsIncrementalSubclassModifiedErrorFirst] +from b import B +B(5, 'foo') +[file a.py] +import attr +@attr.s(auto_attribs=True) +class A: + x: int + +[file b.py] +import attr +from a import A +@attr.s(auto_attribs=True) +class B(A): + y: int + +[file b.py.2] +import attr +from a import A 
+@attr.s(auto_attribs=True) +class B(A): + y: str + +[builtins fixtures/list.pyi] +[out1] +main:2: error: Argument 2 to "B" has incompatible type "str"; expected "int" + +[out2] +[rechecked b] + +[case testAttrsIncrementalThreeFiles] +from c import C +C(5, 'foo', True) + +[file a.py] +import attr +@attr.s +class A: + a: int = attr.ib() + +[file b.py] +import attr +@attr.s +class B: + b: str = attr.ib() + +[file c.py] +from a import A +from b import B +import attr +@attr.s +class C(A, B): + c: bool = attr.ib() + +[builtins fixtures/list.pyi] +[out1] +[out2] + +[case testAttrsIncrementalThreeRuns] +from a import A +A(5) + +[file a.py] +import attr +@attr.s(auto_attribs=True) +class A: + a: int + +[file a.py.2] +import attr +@attr.s(auto_attribs=True) +class A: + a: str + +[file a.py.3] +import attr +@attr.s(auto_attribs=True) +class A: + a: int = 6 + +[builtins fixtures/list.pyi] +[out1] +[out2] +main:2: error: Argument 1 to "A" has incompatible type "int"; expected "str" +[out3] + [case testDeletedDepLineNumber] # The import is not on line 1 and that data should be preserved import a diff --git a/test-data/unit/fixtures/attr.pyi b/test-data/unit/fixtures/attr.pyi new file mode 100644 index 000000000000..01a118046f46 --- /dev/null +++ b/test-data/unit/fixtures/attr.pyi @@ -0,0 +1,14 @@ +# Builtins stub used to support @attr.s tests. + +class object: + def __init__(self) -> None: pass + def __eq__(self, o: object) -> bool: pass + def __ne__(self, o: object) -> bool: pass + +class type: pass +class function: pass +class bool: pass +class int: pass +class str: pass +class unicode: pass +class ellipsis: pass diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi index a1d1b9c1fdf5..bf506d97312f 100644 --- a/test-data/unit/fixtures/bool.pyi +++ b/test-data/unit/fixtures/bool.pyi @@ -12,3 +12,4 @@ class bool: pass class int: pass class str: pass class unicode: pass +class ellipsis: pass diff --git a/test-data/unit/fixtures/classmethod.pyi b/test-data/unit/fixtures/classmethod.pyi index 6d7f71bd52ff..bac4ea7d153b 100644 --- a/test-data/unit/fixtures/classmethod.pyi +++ b/test-data/unit/fixtures/classmethod.pyi @@ -22,5 +22,6 @@ class int: class str: pass class bytes: pass class bool: pass +class ellipsis: pass class tuple(typing.Generic[_T]): pass diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi index 999a73739364..b6810d41fd1f 100644 --- a/test-data/unit/fixtures/exception.pyi +++ b/test-data/unit/fixtures/exception.pyi @@ -11,5 +11,6 @@ class int: pass class str: pass class unicode: pass class bool: pass +class ellipsis: pass class BaseException: pass diff --git a/test-data/unit/lib-stub/attr.pyi b/test-data/unit/lib-stub/attr.pyi new file mode 100644 index 000000000000..d62a99a685eb --- /dev/null +++ b/test-data/unit/lib-stub/attr.pyi @@ -0,0 +1,57 @@ +from typing import TypeVar, overload, Callable, Any, Type, Optional + +_T = TypeVar('_T') +_C = TypeVar('_C', bound=type) + +@overload +def attr(default: Optional[_T] = ..., + validator: Optional[Any] = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + convert: Optional[Callable[[Any], _T]] = ..., + metadata: Any = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[Callable[[Any], _T]] = ...) -> _T: ... 
+@overload +def attr(default: None = ..., + validator: None = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + convert: Optional[Callable[[Any], _T]] = ..., + metadata: Any = ..., + type: None = ..., + converter: None = ...) -> Any: ... + +@overload +def attributes(maybe_cls: _C, + these: Optional[Any] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + str: bool = ..., + auto_attribs: bool = ...) -> _C: ... +@overload +def attributes(maybe_cls: None = ..., + these: Optional[Any] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + str: bool = ..., + auto_attribs: bool = ...) -> Callable[[_C], _C]: ... + +# aliases +s = attrs = attributes +ib = attrib = attr +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
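
Note for plugin authors (illustration only, not part of the patch above): the new TypeInfo.metadata dictionary and the get_class_decorator_hook are general extension points rather than attrs-specific machinery. The sketch below shows how a third-party plugin might combine them, under stated assumptions: the decorator fullname 'myframework.model' and the plugin module are hypothetical, and the plugin(version) entry point is assumed to follow mypy's usual plugin-loading convention.

# Hypothetical sketch (not part of this diff): a third-party plugin that
# registers a class-decorator hook and records data in TypeInfo.metadata,
# mirroring what mypy/plugins/attrs.py does in the patch above.
from typing import Callable, Optional, Type

from mypy.plugin import ClassDefContext, Plugin


class MyFrameworkPlugin(Plugin):
    def get_class_decorator_hook(self, fullname: str
                                 ) -> Optional[Callable[[ClassDefContext], None]]:
        # 'myframework.model' is an assumed decorator name used for illustration.
        if fullname == 'myframework.model':
            return model_class_callback
        return None


def model_class_callback(ctx: ClassDefContext) -> None:
    info = ctx.cls.info
    # Anything JSON-serializable stored here is written out by
    # TypeInfo.serialize() and restored by TypeInfo.deserialize(), so the
    # data survives incremental-mode caching and is visible to subclasses.
    info.metadata['myframework'] = {'is_model': True}


def plugin(version: str) -> Type[Plugin]:
    # Assumed to follow mypy's standard plugin entry point convention.
    return MyFrameworkPlugin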