Skip to content

Commit aaa3752

Browse files
committed
Merge branch 'ignore-multiline'
Fixes #627.
2 parents dd83104 + 33aa212 commit aaa3752

File tree

6 files changed

+61
-16
lines changed

6 files changed

+61
-16
lines changed

mypy/lex.py

Lines changed: 17 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -157,14 +157,16 @@ def __str__(self):
157157
INVALID_DEDENT = 5
158158

159159

160-
def lex(string: Union[str, bytes], first_line: int = 1, pyversion: int = 3) -> List[Token]:
161-
"""Analyze string and return an array of token objects.
160+
def lex(string: Union[str, bytes], first_line: int = 1,
161+
pyversion: int = 3) -> Tuple[List[Token], Set[int]]:
162+
"""Analyze string, and return an array of token objects and the lines to ignore.
162163
163-
The last token is always Eof.
164+
The last token is always Eof. The intention is to ignore any
165+
semantic and type check errors on the ignored lines.
164166
"""
165167
l = Lexer(pyversion)
166168
l.lex(string, first_line)
167-
return l.tok
169+
return l.tok, l.ignored_lines
168170

169171

170172
# Reserved words (not including operators)
@@ -291,12 +293,16 @@ class Lexer:
291293

292294
pyversion = 3
293295

296+
# Lines to ignore (using # type: ignore).
297+
ignored_lines = Undefined(Set[int])
298+
294299
def __init__(self, pyversion: int = 3) -> None:
295300
self.map = [self.unknown_character] * 256
296301
self.tok = []
297302
self.indents = [0]
298303
self.open_brackets = []
299304
self.pyversion = pyversion
305+
self.ignored_lines = set()
300306
# Fill in the map from valid character codes to relevant lexer methods.
301307
for seq, method in [('ABCDEFGHIJKLMNOPQRSTUVWXYZ', self.lex_name),
302308
('abcdefghijklmnopqrstuvwxyz_', self.lex_name),
@@ -811,6 +817,8 @@ def add_pre_whitespace(self, s: str) -> None:
811817
self.pre_whitespace += s
812818
self.i += len(s)
813819

820+
type_ignore_exp = re.compile(r'[ \t]*#[ \t]*type:[ \t]*ignore\b')
821+
814822
def add_token(self, tok: Token) -> None:
815823
"""Store a token.
816824
@@ -823,6 +831,11 @@ def add_token(self, tok: Token) -> None:
823831
and not isinstance(tok, Dedent)):
824832
raise ValueError('Empty token')
825833
tok.pre = self.pre_whitespace
834+
if self.type_ignore_exp.match(tok.pre):
835+
delta = 0
836+
if '\n' in tok.pre or '\r' in tok.pre:
837+
delta += 1
838+
self.ignored_lines.add(self.line - delta)
826839
tok.line = self.line
827840
self.tok.append(tok)
828841
self.i += len(tok.string)

mypy/parse.py

Lines changed: 3 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -112,11 +112,10 @@ def __init__(self, fnam: str, errors: Errors, pyversion: int,
112112
self.errors.set_file('<input>')
113113

114114
def parse(self, s: Union[str, bytes]) -> MypyFile:
115-
self.tok = lex.lex(s, pyversion=self.pyversion)
115+
self.tok, self.ignored_lines = lex.lex(s, pyversion=self.pyversion)
116116
self.ind = 0
117117
self.imports = []
118118
self.future_options = []
119-
self.ignored_lines = set()
120119
file = self.parse_file()
121120
if self.raise_on_error and self.errors.is_errors():
122121
self.errors.raise_error()
@@ -1564,13 +1563,8 @@ def expect_type(self, typ: type) -> Token:
15641563
def expect_colon_and_break(self) -> Tuple[Token, Token]:
15651564
return self.expect_type(Colon), self.expect_type(Break)
15661565

1567-
type_ignore_exp = re.compile(r'[ \t]*#[ \t]*type:[ \t]*ignore\b')
1568-
15691566
def expect_break(self) -> Token:
1570-
token = self.expect_type(Break)
1571-
if self.type_ignore_exp.match(token.pre):
1572-
self.ignored_lines.add(token.line)
1573-
return token
1567+
return self.expect_type(Break)
15741568

15751569
def current(self) -> Token:
15761570
return self.tok[self.ind]
@@ -1645,7 +1639,7 @@ def parse_type_comment(self, token: Token, signature: bool) -> Type:
16451639
if type_as_str == 'ignore':
16461640
# Actually a "# type: ignore" annotation -> not a type.
16471641
return None
1648-
tokens = lex.lex(type_as_str, token.line)
1642+
tokens = lex.lex(type_as_str, token.line)[0]
16491643
if len(tokens) < 2:
16501644
# Empty annotation (only Eof token)
16511645
self.errors.report(token.line, 'Empty type annotation')

mypy/parsetype.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -189,7 +189,7 @@ def parse_str_as_type(typestr: str, line: int) -> Type:
189189
"""
190190

191191
typestr = typestr.strip()
192-
tokens = lex(typestr, line)
192+
tokens = lex(typestr, line)[0]
193193
result, i = parse_type(tokens, 0)
194194
if i < len(tokens) - 2:
195195
raise TypeParseError(tokens[i], i)

mypy/test/data/check-ignore.test

Lines changed: 8 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -19,6 +19,14 @@ a.foo
1919
b()
2020
1() # E: "int" not callable
2121

22+
[case testIgnoreImportFromErrorMultiline]
23+
from xyz_m import ( # type: ignore
24+
a, b
25+
)
26+
a.foo
27+
b()
28+
1() # E: "int" not callable
29+
2230
[case testIgnoreImportAllError]
2331
from xyz_m import * # type: ignore
2432
x # E: Name 'x' is not defined

mypy/test/data/parse.test

Lines changed: 30 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -3292,3 +3292,33 @@ MypyFile:1(
32923292
ExpressionStmt:2(
32933293
NameExpr(y))
32943294
IgnoredLines(1, 2))
3295+
3296+
[case testIgnoreAnnotationAndMultilineStatement]
3297+
x = {
3298+
1: 2 # type: ignore
3299+
}
3300+
y = { # type: ignore
3301+
1: 2
3302+
} # type: ignore
3303+
[out]
3304+
MypyFile:1(
3305+
AssignmentStmt:1(
3306+
NameExpr(x)
3307+
DictExpr:1(
3308+
IntExpr(1)
3309+
IntExpr(2)))
3310+
AssignmentStmt:4(
3311+
NameExpr(y)
3312+
DictExpr:4(
3313+
IntExpr(1)
3314+
IntExpr(2)))
3315+
IgnoredLines(2, 4, 6))
3316+
3317+
[case testIgnoreAnnotationAndMultilineStatement2]
3318+
from m import ( # type: ignore
3319+
x, y
3320+
)
3321+
[out]
3322+
MypyFile:1(
3323+
ImportFrom:1(m, [x : x, y : y])
3324+
IgnoredLines(1))

mypy/test/testlex.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -425,7 +425,7 @@ def assert_lex(self, src, lexed):
425425
if lexed.endswith(' ...'):
426426
lexed = lexed[:-3] + 'Break() Eof()'
427427

428-
l = lex(src)
428+
l = lex(src)[0]
429429
r = []
430430
for t in l:
431431
r.append(str(t))
@@ -439,7 +439,7 @@ def assert_line(self, s, a):
439439
s = s.replace('\\n', '\n')
440440
s = s.replace('\\r', '\r')
441441

442-
tt = lex(s)
442+
tt = lex(s)[0]
443443
r = []
444444
for t in tt:
445445
r.append(t.line)

0 commit comments

Comments
 (0)