Add support for Anthropic prompt caching
wch committed Nov 2, 2024
1 parent 415ced0 commit c5f9551
Showing 1 changed file with 15 additions and 1 deletion.
16 changes: 15 additions & 1 deletion shiny/ui/_chat_normalize.py
@@ -185,14 +185,28 @@ def can_normalize_chunk(self, chunk: Any) -> bool:
             # The actual MessageStreamEvent is a generic, so isinstance() can't
             # be used to check the type. Instead, we manually construct the relevant
             # union of relevant classes...
-            return (
+            if (
                 isinstance(chunk, RawContentBlockDeltaEvent)
                 or isinstance(chunk, RawContentBlockStartEvent)
                 or isinstance(chunk, RawContentBlockStopEvent)
                 or isinstance(chunk, RawMessageDeltaEvent)
                 or isinstance(chunk, RawMessageStartEvent)
                 or isinstance(chunk, RawMessageStopEvent)
+            ):
+                return True
+
+            # Older versions of the anthropic library don't have the beta prompt caching
+            # types, so we need to check for them separately. If this import fails or
+            # errors, then we'll end up in the Exception handler and return False.
+            from anthropic.types.beta.prompt_caching import (
+                RawPromptCachingBetaMessageStartEvent,
             )
+
+            if isinstance(chunk, RawPromptCachingBetaMessageStartEvent):
+                return True
+
+            return False
+
         except Exception:
             return False

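For context, here is a minimal sketch of the same guarded-import pattern in isolation. It assumes only that the anthropic package may or may not be installed at a version that ships the beta prompt-caching types; the helper name is_prompt_caching_start_event is hypothetical and not part of this commit.

from typing import Any


def is_prompt_caching_start_event(chunk: Any) -> bool:
    # Hypothetical helper mirroring the commit's fallback check: the beta
    # prompt-caching event type only exists in newer releases of the
    # anthropic library, so the import itself serves as the feature test.
    try:
        from anthropic.types.beta.prompt_caching import (
            RawPromptCachingBetaMessageStartEvent,
        )
    except Exception:
        # Older anthropic versions (or no anthropic at all): the type, and
        # therefore the event, cannot occur.
        return False
    return isinstance(chunk, RawPromptCachingBetaMessageStartEvent)

Keeping the import inside the function keeps the check lazy, matching how the commit relies on the surrounding try/except in can_normalize_chunk to absorb the ImportError on older anthropic versions and fall back to returning False.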
