From d01091626d589d1be509c6137d18ff467635e99a Mon Sep 17 00:00:00 2001
From: Baptiste Colle
Date: Thu, 25 Jul 2024 08:38:03 +0200
Subject: [PATCH] remove task validation not needed

---
 optimum_benchmark/backends/llama_cpp/config.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/optimum_benchmark/backends/llama_cpp/config.py b/optimum_benchmark/backends/llama_cpp/config.py
index 2cb95294..93490428 100644
--- a/optimum_benchmark/backends/llama_cpp/config.py
+++ b/optimum_benchmark/backends/llama_cpp/config.py
@@ -26,9 +26,6 @@ class LlamaCppConfig(BackendConfig):
     def __post_init__(self):
         super().__post_init__()
 
-        if self.task not in TEXT_GENERATION_TASKS + TEXT_EMBEDDING_TASKS:
-            raise NotImplementedError(f"Llama.cpp does not support task {self.task}")
-
         self.device = self.device.lower()  # type: ignore
         if self.device not in ["cuda", "mps", "cpu"]:
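
Note: for context, below is a minimal sketch of what LlamaCppConfig.__post_init__ looks like after this patch, reconstructed only from the lines visible in the hunk. The stand-in BackendConfig base class, its fields, and the ValueError raised for unsupported devices are assumptions for illustration; the hunk is truncated before that point and the real class lives in optimum_benchmark/backends/llama_cpp/config.py.

from dataclasses import dataclass


@dataclass
class BackendConfig:
    # Stand-in for the optimum_benchmark base config; fields are assumed.
    task: str = "text-generation"
    device: str = "cpu"

    def __post_init__(self):
        pass


@dataclass
class LlamaCppConfig(BackendConfig):
    def __post_init__(self):
        super().__post_init__()

        # The task check against TEXT_GENERATION_TASKS + TEXT_EMBEDDING_TASKS
        # that this patch removes used to sit here.

        self.device = self.device.lower()  # type: ignore
        if self.device not in ["cuda", "mps", "cpu"]:
            # The hunk is truncated here; raising ValueError is an assumption.
            raise ValueError(f"Llama.cpp does not support device {self.device}")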