Commit af13a90

Remove disable_full_determinism from StableVideoDiffusion xformers test. (huggingface#7039)

* update
* update

1 parent 3067da1
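
The change itself is small: instead of the xformers test turning full determinism off at its start and back on at its end, the test module now calls enable_full_determinism() once at import time, like other pipeline test modules. For context, here is a rough sketch of what the two helpers from diffusers.utils.testing_utils do (paraphrased; treat the exact bodies as an approximation rather than the canonical implementation):

    import os

    import torch


    def enable_full_determinism():
        # Deterministic CUDA kernels need a fixed cuBLAS workspace once
        # torch.use_deterministic_algorithms(True) is in effect.
        os.environ["CUDA_LAUNCH_BLOCKING"] = "1"
        os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":16:8"
        torch.use_deterministic_algorithms(True)
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False


    def disable_full_determinism():
        # Reverts the settings above (approximate sketch).
        os.environ["CUDA_LAUNCH_BLOCKING"] = "0"
        os.environ["CUBLAS_WORKSPACE_CONFIG"] = ""
        torch.use_deterministic_algorithms(False)

With the per-test disable/enable pair removed, the xformers comparison runs under the same deterministic settings as every other test in the module, which is also why the import of disable_full_determinism can be dropped.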

File tree

1 file changed: +3, -5 lines

tests/pipelines/stable_video_diffusion/test_stable_video_diffusion.py

Lines changed: 3 additions & 5 deletions
@@ -22,7 +22,6 @@
 from diffusers.utils.import_utils import is_xformers_available
 from diffusers.utils.testing_utils import (
     CaptureLogger,
-    disable_full_determinism,
     enable_full_determinism,
     floats_tensor,
     numpy_cosine_similarity_distance,
@@ -34,6 +33,9 @@
 from ..test_pipelines_common import PipelineTesterMixin
 
 
+enable_full_determinism()
+
+
 def to_np(tensor):
     if isinstance(tensor, torch.Tensor):
         tensor = tensor.detach().cpu().numpy()
@@ -465,8 +467,6 @@ def test_model_cpu_offload_forward_pass(self, expected_max_diff=2e-4):
         reason="XFormers attention is only available with CUDA and `xformers` installed",
     )
     def test_xformers_attention_forwardGenerator_pass(self):
-        disable_full_determinism()
-
         expected_max_diff = 9e-4
 
         if not self.test_xformers_attention:
@@ -496,8 +496,6 @@ def test_xformers_attention_forwardGenerator_pass(self):
         max_diff = np.abs(to_np(output_with_offload) - to_np(output_without_offload)).max()
         self.assertLess(max_diff, expected_max_diff, "XFormers attention should not affect the inference results")
 
-        enable_full_determinism()
-
 
 @slow
 @require_torch_gpu
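
The assertion in the last hunk is a plain elementwise tolerance check between the two pipeline outputs. A minimal, self-contained sketch of that pattern (the arrays below are stand-ins for the real pipeline outputs; expected_max_diff is the 9e-4 tolerance visible in the diff):

    import numpy as np

    # Stand-ins for the pipeline outputs with and without xformers attention.
    output_without_offload = np.zeros((1, 8, 3, 32, 32), dtype=np.float32)
    output_with_offload = output_without_offload + 1e-5

    expected_max_diff = 9e-4
    max_diff = np.abs(output_with_offload - output_without_offload).max()
    assert max_diff < expected_max_diff, (
        "XFormers attention should not affect the inference results"
    )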

Comments (0)