Skip to content

Commit 273b445

Browse files
authored
[tests] fix deprecated attention processor testing. (#13469)
fix deprecated attention processor testing.
1 parent 526498d commit 273b445

1 file changed

Lines changed: 6 additions & 3 deletions

File tree

tests/models/test_attention_processor.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,11 @@
+import importlib.metadata
 import tempfile
 import unittest

 import numpy as np
 import pytest
 import torch
+from packaging import version

 from diffusers import DiffusionPipeline
 from diffusers.models.attention_processor import Attention, AttnAddedKVProcessor
@@ -87,9 +89,10 @@ def is_dist_enabled(pytestconfig):
     return pytestconfig.getoption("dist") == "loadfile"

 @pytest.mark.xfail(
-    condition=torch.device(torch_device).type == "cuda" and is_dist_enabled,
-    reason="Test currently fails on our GPU CI because of `loadfile`. Note that it only fails when the tests are distributed from `pytest ... tests/models`. If the tests are run individually, even with `loadfile` it won't fail.",
-    strict=True,
+    condition=(torch.device(torch_device).type == "cuda" and is_dist_enabled)
+    or version.parse(importlib.metadata.version("transformers")).is_devrelease,
+    reason="Test currently fails on our GPU CI because of `loadfile` or with source installation of transformers due to CLIPTextModel key prefix changes.",
+    strict=False,
 )
 def test_conversion_when_using_device_map(self):
     pipe = DiffusionPipeline.from_pretrained(

0 commit comments

Comments (0)