forked from codelion/optillm
-
Notifications
You must be signed in to change notification settings - Fork 0
/
litellm_wrapper.py
48 lines (42 loc) · 1.5 KB
/
litellm_wrapper.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import os
import litellm
from litellm import completion
from typing import List, Dict, Any, Optional
# Gemini harm-filter categories that this wrapper explicitly disables:
# each category is mapped to the most permissive "BLOCK_NONE" threshold.
_HARM_CATEGORIES = (
    "HARM_CATEGORY_HARASSMENT",
    "HARM_CATEGORY_HATE_SPEECH",
    "HARM_CATEGORY_SEXUALLY_EXPLICIT",
    "HARM_CATEGORY_DANGEROUS_CONTENT",
)
SAFETY_SETTINGS = [
    {"category": category, "threshold": "BLOCK_NONE"}
    for category in _HARM_CATEGORIES
]
class LiteLLMWrapper:
    """Thin OpenAI-SDK-style facade over LiteLLM's ``completion`` function.

    Mirrors the ``client.chat.completions.create(...)`` and
    ``client.models.list()`` surface of the OpenAI client so callers can
    swap between the two without code changes.
    """

    def __init__(self, api_key: Optional[str] = None, base_url: Optional[str] = None):
        # NOTE(review): api_key/base_url are stored but never forwarded to
        # `completion` — presumably litellm picks credentials up from
        # environment variables; confirm whether these should be passed through.
        self.api_key = api_key
        self.base_url = base_url
        self.chat = self.Chat()

    class Chat:
        class Completions:
            @staticmethod
            def create(model: str, messages: List[Dict[str, str]], **kwargs):
                """Proxy to ``litellm.completion``.

                Defaults ``safety_settings`` to the module-level
                SAFETY_SETTINGS; a caller-supplied value in **kwargs wins.
                (The previous ``**kwargs, safety_settings=...`` form raised
                TypeError whenever the caller passed their own
                ``safety_settings``.)
                """
                kwargs.setdefault("safety_settings", SAFETY_SETTINGS)
                # LiteLLM already returns an OpenAI-compatible response
                # object, so no conversion is needed.
                return completion(model=model, messages=messages, **kwargs)

        # Class-level instance so `wrapper.chat.completions.create(...)` works.
        completions = Completions()

    class Models:
        @staticmethod
        def list():
            """Return an OpenAI-shaped static model listing.

            LiteLLM has no model-listing endpoint, so this is a predefined
            list; extend it as needed.
            """
            return {
                "data": [
                    {"id": "gpt-4o-mini"},
                    {"id": "gpt-4o"},
                    {"id": "command-nightly"},
                    # Add more models as needed
                ]
            }

    # Class-level instance so `wrapper.models.list()` works.
    models = Models()