diff --git a/.github/workflows/test_cli_misc.yaml b/.github/workflows/test_cli_misc.yaml
index 41114a38..93202a14 100644
--- a/.github/workflows/test_cli_misc.yaml
+++ b/.github/workflows/test_cli_misc.yaml
@@ -54,4 +54,4 @@ jobs:
         pip install -e .[testing]
 
     - name: Run tests
-      run: pytest -s -k "cli and not (cpu or cuda or mps)"
+      run: pytest -s -k "cli and not (cpu or cuda)"
diff --git a/optimum_benchmark/task_utils.py b/optimum_benchmark/task_utils.py
index e9053e09..900ab9c5 100644
--- a/optimum_benchmark/task_utils.py
+++ b/optimum_benchmark/task_utils.py
@@ -69,7 +69,7 @@ def infer_library_from_model_name_or_path(model_name_or_path: str, revision: Opt
     model_info = huggingface_hub.model_info(model_name_or_path, revision=revision)
     inferred_library_name = getattr(model_info, "library_name", None)
 
-    if "gguf" in model_info.tags:
+    if inferred_library_name is None and "gguf" in model_info.tags:
         inferred_library_name = "llama_cpp"
 
     if inferred_library_name == "sentence-transformers":
diff --git a/tests/configs/mps_inference_llama_cpp_embedding.yaml b/tests/configs/cpu_inference_llama_cpp_embedding.yaml
similarity index 100%
rename from tests/configs/mps_inference_llama_cpp_embedding.yaml
rename to tests/configs/cpu_inference_llama_cpp_embedding.yaml
diff --git a/tests/configs/mps_inference_llama_cpp_text_generation.yaml b/tests/configs/cpu_inference_llama_cpp_text_generation.yaml
similarity index 100%
rename from tests/configs/mps_inference_llama_cpp_text_generation.yaml
rename to tests/configs/cpu_inference_llama_cpp_text_generation.yaml
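
Reviewer note (not part of the diff): the task_utils.py change makes the "gguf" tag a fallback rather than an override, so a Hub repo that declares a library_name but also carries a gguf tag keeps its declared library. Below is a minimal Python sketch of that branch under the stated assumption; the repo id "some-org/some-model-gguf" is hypothetical, and the real function continues with further remapping (e.g. sentence-transformers) not shown here.

    # Minimal sketch of the corrected fallback logic; not the full function.
    from typing import Optional

    import huggingface_hub


    def infer_library(model_name_or_path: str, revision: Optional[str] = None) -> Optional[str]:
        model_info = huggingface_hub.model_info(model_name_or_path, revision=revision)
        inferred_library_name = getattr(model_info, "library_name", None)

        # Fall back to llama_cpp only when the Hub metadata reports no library,
        # instead of overriding any declared library whenever a gguf tag is present.
        if inferred_library_name is None and "gguf" in model_info.tags:
            inferred_library_name = "llama_cpp"

        return inferred_library_name


    # Hypothetical usage: a GGUF-only repo with no library_name resolves to llama_cpp.
    # print(infer_library("some-org/some-model-gguf"))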