Commit

small fix (#12634)
MeouSker77 authored Dec 30, 2024
1 parent 2d08155 commit f289f68
Showing 2 changed files with 1 addition and 3 deletions.
python/llm/src/ipex_llm/transformers/low_bit_linear.py (1 change: 0 additions & 1 deletion)
@@ -644,7 +644,6 @@ def forward(self, x: torch.Tensor):
         if x0.device.type == "xpu":
             # GPU logic
             try:
-                import intel_extension_for_pytorch
                 import xe_linear
                 from ipex_llm.transformers.models.utils import use_xmx
             except ModuleNotFoundError:
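For readers skimming the diff: after this change, xe_linear is imported without first importing intel_extension_for_pytorch. A minimal sketch of the resulting import guard (paraphrased; the surrounding forward() code and the real error handling are elided):

# Sketch only: xe_linear is tried directly; a missing module still falls
# through to the except branch, whose real body is not shown in this hunk.
try:
    import xe_linear
    from ipex_llm.transformers.models.utils import use_xmx
except ModuleNotFoundError:
    pass  # placeholder for the actual handler in low_bit_linear.py
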
python/llm/src/ipex_llm/transformers/models/utils.py (3 changes: 1 addition & 2 deletions)
@@ -346,8 +346,7 @@ def use_decoding_fast_path(proj,
 def use_xmx(x: torch.Tensor, qtype: int):
     device = get_xpu_device_type(x)
     return (
-        os.environ.get("BIGDL_LLM_XMX_DISABLED", "0") != "1"
-        and device in ["arc", "flex", "pvc"]
+        device in ["arc", "flex", "pvc"]
         and qtype in [SYM_INT4, SYM_INT8, FP8E4, FP8E5]
         and (
             (device == "pvc" and 1 < x.size(0) <= 16)
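For illustration, a minimal, self-contained sketch of the predicate's new shape. The constant values and the device helper below are placeholder stand-ins, not the real definitions from ipex_llm.transformers.models.utils, and the branches hidden by the truncated diff context are omitted:

import torch

# Placeholder stand-ins for the module's quantization-type constants
# (illustrative values only, not the real ones).
SYM_INT4, SYM_INT8, FP8E4, FP8E5 = 2, 3, 15, 16

def get_xpu_device_type_stub(x: torch.Tensor) -> str:
    # Stand-in for the project's get_xpu_device_type helper.
    return "arc"

def use_xmx_sketch(x: torch.Tensor, qtype: int) -> bool:
    device = get_xpu_device_type_stub(x)
    return (
        device in ["arc", "flex", "pvc"]                 # BIGDL_LLM_XMX_DISABLED gate removed
        and qtype in [SYM_INT4, SYM_INT8, FP8E4, FP8E5]
        and (device == "pvc" and 1 < x.size(0) <= 16)    # remaining branches elided by the diff
    )

The practical effect of the deletion is that use_xmx itself no longer consults the BIGDL_LLM_XMX_DISABLED environment variable; within this function the decision now depends only on the device type, the quantization type, and the batch-size condition.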
