1 parent 2f60439 commit ebed8a6
tests/test_modules.py
@@ -5,6 +5,7 @@
 from torch import nn
 
 import bitsandbytes as bnb
+from bitsandbytes.functional import ipex_xpu
 from tests.helpers import get_available_devices, id_formatter
 
 
@@ -287,8 +288,8 @@ def test_linear_kbit_fp32_bias(device, module):
 def test_kbit_backprop(device, module):
     if device == "cpu":
         pytest.xfail("Test is not yet supported on CPU")
-    if device == "xpu":
-        pytest.xfail("Missing int8_double_quant implementation XPU")
+    if device == "xpu" and ipex_xpu:
+        pytest.xfail("Missing int8_double_quant implementation in Triton for XPU")
 
     b = 16
     dim1 = 36
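
For context, a minimal, self-contained sketch of the gating pattern this commit applies: an xfail that is conditional on a backend flag rather than on the device alone. The IPEX_XPU_ACTIVE constant below is a hypothetical stand-in for bitsandbytes.functional.ipex_xpu (assumed to be truthy when the IPEX XPU backend is in use); this is an illustration of the pattern, not the repository's actual test.

import pytest

# Hypothetical stand-in for bitsandbytes.functional.ipex_xpu (assumption:
# truthy when the IPEX XPU backend is the active implementation).
IPEX_XPU_ACTIVE = False


@pytest.mark.parametrize("device", ["cpu", "xpu", "cuda"])
def test_backend_gated_xfail(device):
    # Mirror the commit: mark the XPU run as an expected failure only when the
    # backend-specific condition holds, instead of xfailing every XPU run.
    if device == "cpu":
        pytest.xfail("Test is not yet supported on CPU")
    if device == "xpu" and IPEX_XPU_ACTIVE:
        pytest.xfail("Missing int8_double_quant implementation in Triton for XPU")
    assert True  # placeholder for the real k-bit backprop checks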