@@ -1,5 +1,5 @@
 import numbers
-from typing import Any, List, Sequence
+from typing import Any, Dict, List, Optional, Sequence, Tuple, Union

 import numpy as np
 import torch
@@ -34,23 +34,23 @@ def _get_image_num_channels(img: Any) -> int:


 @torch.jit.unused
-def hflip(img):
+def hflip(img: Image.Image) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

     return img.transpose(Image.FLIP_LEFT_RIGHT)


 @torch.jit.unused
-def vflip(img):
+def vflip(img: Image.Image) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

     return img.transpose(Image.FLIP_TOP_BOTTOM)


 @torch.jit.unused
-def adjust_brightness(img, brightness_factor):
+def adjust_brightness(img: Image.Image, brightness_factor: float) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

@@ -60,7 +60,7 @@ def adjust_brightness(img, brightness_factor):


 @torch.jit.unused
-def adjust_contrast(img, contrast_factor):
+def adjust_contrast(img: Image.Image, contrast_factor: float) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

@@ -70,7 +70,7 @@ def adjust_contrast(img, contrast_factor):


 @torch.jit.unused
-def adjust_saturation(img, saturation_factor):
+def adjust_saturation(img: Image.Image, saturation_factor: float) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

@@ -80,7 +80,7 @@ def adjust_saturation(img, saturation_factor):


 @torch.jit.unused
-def adjust_hue(img, hue_factor):
+def adjust_hue(img: Image.Image, hue_factor: float) -> Image.Image:
     if not (-0.5 <= hue_factor <= 0.5):
         raise ValueError('hue_factor ({}) is not in [-0.5, 0.5].'.format(hue_factor))

@@ -104,7 +104,12 @@ def adjust_hue(img, hue_factor):


 @torch.jit.unused
-def adjust_gamma(img, gamma, gain=1):
+def adjust_gamma(
+    img: Image.Image,
+    gamma: float,
+    gain: float = 1.0,
+) -> Image.Image:
+
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

@@ -121,7 +126,13 @@ def adjust_gamma(img, gamma, gain=1):


 @torch.jit.unused
-def pad(img, padding, fill=0, padding_mode="constant"):
+def pad(
+    img: Image.Image,
+    padding: Union[int, List[int], Tuple[int, ...]],
+    fill: Optional[Union[float, List[float], Tuple[float, ...]]] = 0,
+    padding_mode: str = "constant",
+) -> Image.Image:
+
     if not _is_pil_image(img):
         raise TypeError("img should be PIL Image. Got {}".format(type(img)))

@@ -196,15 +207,28 @@ def pad(img, padding, fill=0, padding_mode="constant"):


 @torch.jit.unused
-def crop(img: Image.Image, top: int, left: int, height: int, width: int) -> Image.Image:
+def crop(
+    img: Image.Image,
+    top: int,
+    left: int,
+    height: int,
+    width: int,
+) -> Image.Image:
+
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

     return img.crop((left, top, left + width, top + height))


 @torch.jit.unused
-def resize(img, size, interpolation=Image.BILINEAR, max_size=None):
+def resize(
+    img: Image.Image,
+    size: Union[Sequence[int], int],
+    interpolation: int = Image.BILINEAR,
+    max_size: Optional[int] = None,
+) -> Image.Image:
+
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
     if not (isinstance(size, int) or (isinstance(size, Sequence) and len(size) in (1, 2))):
@@ -242,7 +266,12 @@ def resize(img, size, interpolation=Image.BILINEAR, max_size=None):


 @torch.jit.unused
-def _parse_fill(fill, img, name="fillcolor"):
+def _parse_fill(
+    fill: Optional[Union[float, List[float], Tuple[float, ...]]],
+    img: Image.Image,
+    name: str = "fillcolor",
+) -> Dict[str, Optional[Union[float, List[float], Tuple[float, ...]]]]:
+
     # Process fill color for affine transforms
     num_bands = len(img.getbands())
     if fill is None:
@@ -261,7 +290,13 @@ def _parse_fill(fill, img, name="fillcolor"):


 @torch.jit.unused
-def affine(img, matrix, interpolation=0, fill=None):
+def affine(
+    img: Image.Image,
+    matrix: List[float],
+    interpolation: int = Image.NEAREST,
+    fill: Optional[Union[float, List[float], Tuple[float, ...]]] = 0,
+) -> Image.Image:
+
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

@@ -271,7 +306,15 @@ def affine(img, matrix, interpolation=0, fill=None):


 @torch.jit.unused
-def rotate(img, angle, interpolation=0, expand=False, center=None, fill=None):
+def rotate(
+    img: Image.Image,
+    angle: float,
+    interpolation: int = Image.NEAREST,
+    expand: bool = False,
+    center: Optional[Tuple[int, int]] = None,
+    fill: Optional[Union[float, List[float], Tuple[float, ...]]] = 0,
+) -> Image.Image:
+
     if not _is_pil_image(img):
         raise TypeError("img should be PIL Image. Got {}".format(type(img)))

@@ -280,7 +323,13 @@ def rotate(img, angle, interpolation=0, expand=False, center=None, fill=None):


 @torch.jit.unused
-def perspective(img, perspective_coeffs, interpolation=Image.BICUBIC, fill=None):
+def perspective(
+    img: Image.Image,
+    perspective_coeffs: float,
+    interpolation: int = Image.BICUBIC,
+    fill: Optional[Union[float, List[float], Tuple[float, ...]]] = 0,
+) -> Image.Image:
+
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

@@ -290,7 +339,7 @@ def perspective(img, perspective_coeffs, interpolation=Image.BICUBIC, fill=None):


 @torch.jit.unused
-def to_grayscale(img, num_output_channels):
+def to_grayscale(img: Image.Image, num_output_channels: int) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

@@ -308,28 +357,28 @@ def to_grayscale(img, num_output_channels):


 @torch.jit.unused
-def invert(img):
+def invert(img: Image.Image) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
     return ImageOps.invert(img)


 @torch.jit.unused
-def posterize(img, bits):
+def posterize(img: Image.Image, bits: int) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
     return ImageOps.posterize(img, bits)


 @torch.jit.unused
-def solarize(img, threshold):
+def solarize(img: Image.Image, threshold: int) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
     return ImageOps.solarize(img, threshold)


 @torch.jit.unused
-def adjust_sharpness(img, sharpness_factor):
+def adjust_sharpness(img: Image.Image, sharpness_factor: float) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

@@ -339,14 +388,14 @@ def adjust_sharpness(img, sharpness_factor):


 @torch.jit.unused
-def autocontrast(img):
+def autocontrast(img: Image.Image) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
     return ImageOps.autocontrast(img)


 @torch.jit.unused
-def equalize(img):
+def equalize(img: Image.Image) -> Image.Image:
     if not _is_pil_image(img):
         raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
     return ImageOps.equalize(img)
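
For reference, a minimal usage sketch of the newly annotated signatures, assuming the helpers above are in scope (e.g. imported from this module); the calls simply mirror the parameter types added in the diff:

from PIL import Image

# Arbitrary RGB test image; any PIL Image works with these helpers.
img = Image.new("RGB", (64, 48))

flipped = hflip(img)                                      # Image.Image -> Image.Image
brighter = adjust_brightness(img, brightness_factor=1.5)  # factor of 1.0 leaves the image unchanged
padded = pad(img, padding=[2, 4], fill=0, padding_mode="constant")
resized = resize(img, size=[32, 32], interpolation=Image.BILINEAR, max_size=None)
rotated = rotate(img, angle=15.0, interpolation=Image.NEAREST, expand=False, center=None, fill=0)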