Skip to content

Commit 3ce5398

Browse files
ezyang authored and facebook-github-bot committed
Back out "Revert D25757721: [pytorch][PR] Run mypy on more test files" (#50142)
Summary: Pull Request resolved: #50142 Original commit changeset: 58437d719285 Test Plan: OSS CI Reviewed By: walterddr, ngimel Differential Revision: D25803866 fbshipit-source-id: d6b83a5211e430c0451994391876103f1ad96315
1 parent 6380869 commit 3ce5398

File tree

6 files changed

+30
-14
lines changed

6 files changed

+30
-14
lines changed

mypy.ini

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,13 @@ check_untyped_defs = True
1717
files =
1818
torch,
1919
caffe2,
20+
test/test_bundled_images.py,
21+
test/test_bundled_inputs.py,
2022
test/test_complex.py,
23+
test/test_dataset.py,
24+
test/test_expecttest.py,
2125
test/test_futures.py,
26+
test/test_numpy_interop.py,
2227
test/test_torch.py,
2328
test/test_type_hints.py,
2429
test/test_type_info.py
@@ -119,6 +124,12 @@ ignore_errors = True
119124
[mypy-torch.overrides]
120125
ignore_errors = True
121126

127+
#
128+
# Adding type annotations to caffe2 is probably not worth the effort
129+
# only work on this if you have a specific reason for it, otherwise
130+
# leave these ignores as they are.
131+
#
132+
122133
[mypy-caffe2.python.*]
123134
ignore_errors = True
124135

test/test_bundled_inputs.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
#!/usr/bin/env python3
22
import io
3+
from typing import List
4+
35
import torch
46
import torch.utils.bundled_inputs
57
from torch.testing._internal.common_utils import TestCase, run_tests
@@ -27,7 +29,7 @@ def forward(self, arg):
2729

2830
sm = torch.jit.script(SingleTensorModel())
2931
original_size = model_size(sm)
30-
get_expr = []
32+
get_expr : List[str] = []
3133
samples = [
3234
# Tensor with small numel and small storage.
3335
(torch.tensor([1]),),

test/test_expecttest.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import string
55
import textwrap
66
import doctest
7+
from typing import Dict, Any
78

89
import hypothesis
910
from hypothesis.strategies import text, integers, composite, sampled_from, booleans
@@ -38,7 +39,7 @@ def test_replace_string_literal_roundtrip(self, t, raw, quote):
3839
r3 = {r}{quote}placeholder3{quote}
3940
""".format(r='r' if raw else '', quote=quote * 3)
4041
new_prog = expecttest.replace_string_literal(textwrap.dedent(prog), 2, t)[0]
41-
ns = {}
42+
ns : Dict[str, Any] = {}
4243
exec(new_prog, ns)
4344
msg = "program was:\n{}".format(new_prog)
4445
self.assertEqual(ns['r'], 'placeholder', msg=msg) # noqa: F821

test/test_numpy_interop.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -47,10 +47,8 @@ def get_castable_tensor(shape, dtype):
4747
else:
4848
# can't directly use min and max, because for int64_t, max - min
4949
# is greater than int64_t range and triggers UB.
50-
dtype_info = torch.iinfo(dtype)
51-
low = max(dtype_info.min, int(-1e10))
52-
high = min(dtype_info.max, int(1e10))
53-
dtype_info = torch.iinfo(dtype)
50+
low = max(torch.iinfo(dtype).min, int(-1e10))
51+
high = min(torch.iinfo(dtype).max, int(1e10))
5452
t = torch.empty(shape, dtype=torch.int64).random_(low, high)
5553
return t.to(dtype)
5654

@@ -272,10 +270,12 @@ def test_numpy_array_interface(self, device):
272270
]
273271
for tp, dtype in zip(types, dtypes):
274272
if np.dtype(dtype).kind == 'u':
275-
x = torch.Tensor([1, 2, 3, 4]).type(tp)
273+
# .type expects a XxxTensor, which have no type hints on
274+
# purpose, so ignore during mypy type checking
275+
x = torch.Tensor([1, 2, 3, 4]).type(tp) # type: ignore
276276
array = np.array([1, 2, 3, 4], dtype=dtype)
277277
else:
278-
x = torch.Tensor([1, -2, 3, -4]).type(tp)
278+
x = torch.Tensor([1, -2, 3, -4]).type(tp) # type: ignore
279279
array = np.array([1, -2, 3, -4], dtype=dtype)
280280

281281
# Test __array__ w/o dtype argument
@@ -309,7 +309,7 @@ def test_numpy_array_interface(self, device):
309309
float_types = [torch.DoubleTensor, torch.FloatTensor]
310310
float_dtypes = [np.float64, np.float32]
311311
for tp, dtype in zip(float_types, float_dtypes):
312-
x = torch.Tensor([1, 2, 3, 4]).type(tp)
312+
x = torch.Tensor([1, 2, 3, 4]).type(tp) # type: ignore
313313
array = np.array([1, 2, 3, 4], dtype=dtype)
314314
for func in ['sin', 'sqrt', 'ceil']:
315315
ufunc = getattr(np, func)
@@ -321,7 +321,7 @@ def test_numpy_array_interface(self, device):
321321

322322
# Test functions with boolean return value
323323
for tp, dtype in zip(types, dtypes):
324-
x = torch.Tensor([1, 2, 3, 4]).type(tp)
324+
x = torch.Tensor([1, 2, 3, 4]).type(tp) # type: ignore
325325
array = np.array([1, 2, 3, 4], dtype=dtype)
326326
geq2_x = np.greater_equal(x, 2)
327327
geq2_array = np.greater_equal(array, 2).astype('uint8')
@@ -360,7 +360,7 @@ def test_parse_numpy_int(self, device):
360360
self.assertEqual(torch.ones([2, 2, 2, 2]).mean(scalar), torch.ones([2, 2, 2, 2]).mean(np_val))
361361

362362
# numpy integral type parses like a python int in custom python bindings:
363-
self.assertEqual(torch.Storage(np_val).size(), scalar)
363+
self.assertEqual(torch.Storage(np_val).size(), scalar) # type: ignore
364364

365365
tensor = torch.tensor([2], dtype=torch.int)
366366
tensor[0] = np_val

torch/testing/_internal/expecttest.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
import traceback
44
import os
55
import string
6+
from typing import Tuple
67

78

89
# This file implements expect tests (also known as "golden" tests).
@@ -139,7 +140,8 @@ def ok_for_raw_triple_quoted_string(s, quote):
139140
r"(?P<raw>r?)", re.DOTALL)
140141

141142

142-
def replace_string_literal(src, lineno, new_string):
143+
def replace_string_literal(src : str, lineno : int,
144+
new_string : str) -> Tuple[str, int]:
143145
r"""
144146
Replace a triple quoted string literal with new contents.
145147
Only handles printable ASCII correctly at the moment. This

torch/utils/bundled_inputs.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/usr/bin/env python3
2-
from typing import Any, TypeVar, Optional, Tuple, List, NamedTuple, Union
2+
from typing import Any, TypeVar, Optional, Tuple, List, NamedTuple, Union, Sequence
33
import textwrap
44
import torch
55
from torch._C import TupleType, OptionalType, ListType
@@ -17,7 +17,7 @@ class InflatableArg(NamedTuple):
1717

1818
def augment_model_with_bundled_inputs(
1919
model: torch.jit.ScriptModule,
20-
inputs: Optional[List[Tuple[Any, ...]]] = None,
20+
inputs: Optional[Sequence[Tuple[Any, ...]]] = None,
2121
_receive_inflate_expr: Optional[List[str]] = None, # For debugging.
2222
) -> None:
2323
"""Add bundled sample inputs to a model.

0 commit comments

Comments (0)