Closed
Description
`Tester.test_randomresized_params` in `test_transforms.py` fails intermittently: the aspect ratio obtained from `RandomResizedCrop.get_params` (1.4117...) falls outside the randomly generated `aspect_ratio_range` (0.96, 1.36) even with the `epsilon = 0.05` tolerance, so the test is flaky. Example CI failure: https://travis-ci.org/pytorch/vision/jobs/563558702
self = <test_transforms.Tester testMethod=test_randomresized_params>

    def test_randomresized_params(self):
        height = random.randint(24, 32) * 2
        width = random.randint(24, 32) * 2
        img = torch.ones(3, height, width)
        to_pil_image = transforms.ToPILImage()
        img = to_pil_image(img)
        size = 100
        epsilon = 0.05
        for _ in range(10):
            scale_min = round(random.random(), 2)
            scale_range = (scale_min, scale_min + round(random.random(), 2))
            aspect_min = max(round(random.random(), 2), epsilon)
            aspect_ratio_range = (aspect_min, aspect_min + round(random.random(), 2))
            randresizecrop = transforms.RandomResizedCrop(size, scale_range, aspect_ratio_range)
            i, j, h, w = randresizecrop.get_params(img, scale_range, aspect_ratio_range)
            aspect_ratio_obtained = w / h
>           assert (min(aspect_ratio_range) - epsilon <= aspect_ratio_obtained <= max(aspect_ratio_range) + epsilon or
                    aspect_ratio_obtained == 1.0)
E           AssertionError: assert (1.411764705882353 <= (1.3599999999999999 + 0.05) or 1.411764705882353 == 1.0)
E           +  where 1.3599999999999999 = max((0.96, 1.3599999999999999))