This repository has been archived by the owner on Oct 25, 2024. It is now read-only.

Commit 38ba1a2

move get_gpu_family to is_gpu_available (#1479)
zhenwei-intel authored Apr 17, 2024
1 parent a6f3ab3 commit 38ba1a2
Showing 4 changed files with 8 additions and 36 deletions.
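
At a glance (an illustrative summary, not part of the commit itself): the string-valued family probe is replaced by a plain boolean availability check, so call sites stop comparing against "no_gpu".

# Old API (removed in this commit):
#   get_gpu_family()          -> "PVC" | "ARC" | "MTL" | raw device name | "no_gpu"
#   typical usage: if get_gpu_family() != "no_gpu": ...
# New API (added in this commit):
#   is_intel_gpu_available()  -> bool, True when torch.xpu reports an available device
#   typical usage: if is_intel_gpu_available(): ...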
intel_extension_for_transformers/tools/utils.py (28 changes: 2 additions & 26 deletions)
@@ -34,34 +34,10 @@
 def supported_gpus():
     return ['flex', 'max', 'arc']

-def get_gpu_family():
-    """Get gpu device family info.
-    Return 'flex'|'max'|'arc'| 'no_gpu'| assert
-    Note, this function need to import intel_extension_for_pytorch
-    Additional info (common gpu name):
-    'Intel(R) Data Center GPU Flex 170'
-    'Intel(R) Data Center GPU Max 1100'
-    'Intel(R) Arc(TM) A770 Graphics'
-    """
-    import intel_extension_for_pytorch as ipex
-    if not (hasattr(torch, "xpu") and torch.xpu.is_available()):
-        return 'no_gpu'
-
-    name = torch.xpu.get_device_name()
-    if torch.xpu.has_xmx() and torch.xpu.has_2d_block_array():
-        return "PVC"
-    elif torch.xpu.has_xmx() and not torch.xpu.has_2d_block_array():
-        return "ARC"
-    elif not torch.xpu.has_xmx() and not torch.xpu.has_2d_block_array():
-        return "MTL"
-    else:
-        warnings.warn("Unverified GPU device: {}".format(name))
-        return name
+def is_intel_gpu_available():
+    return hasattr(torch, "xpu") and torch.xpu.is_available()

 _ipex_available = importlib.util.find_spec("intel_extension_for_pytorch") is not None
 _ipex_version = "N/A"
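
For orientation only (not part of the diff), a minimal caller-side sketch of the new helper, mirroring the guarded pattern that setup.py and the modeling code adopt below; the "xpu"/"cpu" device selection is an illustrative assumption, not code from this repository.

# Minimal usage sketch, assuming intel_extension_for_transformers is installed;
# intel_extension_for_pytorch is additionally needed for torch.xpu to exist.
from intel_extension_for_transformers.tools.utils import (
    is_intel_gpu_available,
    is_ipex_available,
)

# is_intel_gpu_available() only inspects torch.xpu, so pair it with
# is_ipex_available(), as the changed files below do.
device = "xpu" if (is_ipex_available() and is_intel_gpu_available()) else "cpu"
print(f"Selected device: {device}")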
Second changed file (filename not shown in this capture): 4 changes: 2 additions & 2 deletions
@@ -67,7 +67,7 @@
     convert_to_quantized_model,
     replace_linear,
 )
-from ...tools.utils import get_gpu_family, is_ipex_available
+from ...tools.utils import is_intel_gpu_available, is_ipex_available
 from accelerate import init_empty_weights
 from huggingface_hub import hf_hub_download
 from neural_compressor.adaptor.torch_utils.model_wrapper import WeightOnlyLinear
@@ -84,7 +84,7 @@

 from typing import Union

-if is_ipex_available() and get_gpu_family() != "no_gpu":
+if is_ipex_available() and is_intel_gpu_available():
     # pylint: disable=E0401
     from intel_extension_for_pytorch.nn.utils._quantize_convert import (
         WeightOnlyQuantizedLinear,
setup.py (4 changes: 2 additions & 2 deletions)
@@ -11,7 +11,7 @@

 result = subprocess.Popen("pip install -r requirements.txt", shell=True)
 result.wait()
-from intel_extension_for_transformers.tools.utils import get_gpu_family
+from intel_extension_for_transformers.tools.utils import is_intel_gpu_available

 def check_env_flag(name: str, default: bool = False) -> bool:
     if default:  # if a flag meant to be true if not set / mal-formatted
@@ -29,7 +29,7 @@ def check_env_flag(name: str, default: bool = False) -> bool:
 ipex_available = importlib.util.find_spec(
     "intel_extension_for_pytorch") is not None
 IS_INTEL_GPU = False
-if ipex_available and (get_gpu_family() != "no_gpu"):
+if ipex_available and is_intel_gpu_available():
     SKIP_RUNTIME = True
     RUNTIME_ONLY = False
     IS_INTEL_GPU = True
tests/CI/test_weight_only_gpu.py (8 changes: 2 additions & 6 deletions)
@@ -24,14 +24,10 @@
 from intel_extension_for_transformers.transformers import GPTQConfig, RtnConfig
 from math import isclose
 from transformers import AutoTokenizer
-from intel_extension_for_transformers.tools.utils import get_gpu_family, is_ipex_available
+from intel_extension_for_transformers.tools.utils import is_intel_gpu_available, is_ipex_available
 from torch.utils.data import DataLoader


-if is_ipex_available():
-    gpu_name = get_gpu_family()
-
-
 MODEL_NAME ="hf-internal-testing/tiny-random-gptj"


@@ -73,7 +69,7 @@ def forward(self, x):
         return self.linear(x)


-@unittest.skipIf(not is_ipex_available() or gpu_name == "no_gpu",
+@unittest.skipIf(not is_ipex_available() or not is_intel_gpu_available(),
                  "There is no Intel GPU in this machine, skip this test!")
 class TestArcWeightOnly(unittest.TestCase):

