Skip to content

Commit

Permalink
fix: typing in prompt transformation
Browse files — browse the repository at this point in the history
  • Loading branch information
pabloogc committed Oct 27, 2023
1 parent 3dd4185 commit 880a485
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
6 changes: 2 additions & 4 deletions private_gpt/components/llm/custom/sagemaker.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@
)

if TYPE_CHECKING:
from collections.abc import Callable

from llama_index.callbacks import CallbackManager
from llama_index.llms import (
CompletionResponseGen,
Expand Down Expand Up @@ -113,10 +111,10 @@ class SagemakerLLM(CustomLLM):
context_window: int = Field(
description="The maximum number of context tokens for the model."
)
messages_to_prompt: Callable[..., str] = Field(
messages_to_prompt: Any = Field(
description="The function to convert messages to a prompt.", exclude=True
)
completion_to_prompt: Callable[..., str] = Field(
completion_to_prompt: Any = Field(
description="The function to convert a completion to a prompt.", exclude=True
)
generate_kwargs: dict[str, Any] = Field(
Expand Down
2 changes: 2 additions & 0 deletions private_gpt/components/llm/llm_component.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@ def __init__(self) -> None:

self.llm = SagemakerLLM(
endpoint_name=settings.sagemaker.endpoint_name,
messages_to_prompt=messages_to_prompt,
completion_to_prompt=completion_to_prompt,
)
case "openai":
from llama_index.llms import OpenAI
Expand Down

0 comments on commit 880a485

Please sign in to comment.