@@ -334,7 +334,7 @@ def check_over_forward(self, time_step=0, **forward_kwargs):
334 334
335 335         assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"
336 336
337     -    def test_from_pretrained_save_pretrained(self):
    337 +    def test_from_save_pretrained(self):
338 338         kwargs = dict(self.forward_default_kwargs)
339 339
340 340         num_inference_steps = kwargs.pop("num_inference_steps", None)
@@ -875,7 +875,7 @@ def check_over_configs(self, time_step=0, **config):
875 875
876 876         assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"
877 877
878     -    def test_from_pretrained_save_pretrained(self):
    878 +    def test_from_save_pretrained(self):
879 879         pass
880 880
881 881     def check_over_forward(self, time_step=0, **forward_kwargs):
@@ -1068,7 +1068,7 @@ def check_over_configs(self, time_step=0, **config):
1068 1068
1069 1069         assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"
1070 1070
1071      -    def test_from_pretrained_save_pretrained(self):
     1071 +    def test_from_save_pretrained(self):
1072 1072         pass
1073 1073
1074 1074     def check_over_forward(self, time_step=0, **forward_kwargs):
@@ -1745,7 +1745,7 @@ def check_over_configs(self, time_step=0, **config):
1745 1745
1746 1746         assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"
1747 1747
1748      -    def test_from_pretrained_save_pretrained(self):
     1748 +    def test_from_save_pretrained(self):
1749 1749         pass
1750 1750
1751 1751     def check_over_forward(self, time_step=0, **forward_kwargs):
0 commit comments