
Commit

Add tests for usage
jackmpcollins committed May 16, 2024
1 parent 1877ecf commit 6bcaf5b
Showing 2 changed files with 54 additions and 1 deletion.
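
For context, the pattern these new tests exercise, written out as a minimal standalone sketch (assembled only from the imports and calls visible in the diffs below, with the OpenAI model as the example; the Anthropic tests follow the same shape): request a streamed completion, consume the stream, then read input_tokens and output_tokens from the message's usage attribute.

    from magentic.chat_model.message import Usage, UserMessage
    from magentic.chat_model.openai_chat_model import OpenaiChatModel
    from magentic.streaming import StreamedStr

    chat_model = OpenaiChatModel("gpt-3.5-turbo")
    message = chat_model.complete(
        messages=[UserMessage("Say hello!")], output_types=[StreamedStr]
    )
    str(message.content)  # Finish the stream; usage is populated afterwards
    assert isinstance(message.usage, Usage)
    print(message.usage.input_tokens, message.usage.output_tokens)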
28 changes: 27 additions & 1 deletion tests/chat_model/test_anthropic_chat_model.py
@@ -2,12 +2,13 @@

from magentic.chat_model.anthropic_chat_model import AnthropicChatModel
from magentic.chat_model.base import StructuredOutputError
from magentic.chat_model.message import UserMessage
from magentic.chat_model.message import Usage, UserMessage
from magentic.function_call import (
AsyncParallelFunctionCall,
FunctionCall,
ParallelFunctionCall,
)
from magentic.streaming import AsyncStreamedStr, StreamedStr


@pytest.mark.parametrize(
@@ -28,6 +29,18 @@ def test_anthropic_chat_model_complete(prompt, output_types, expected_output_typ
assert isinstance(message.content, expected_output_type)


@pytest.mark.anthropic
def test_anthropic_chat_model_complete_usage():
chat_model = AnthropicChatModel("claude-3-haiku-20240307")
message = chat_model.complete(
messages=[UserMessage("Say hello!")], output_types=[StreamedStr]
)
str(message.content) # Finish the stream
assert isinstance(message.usage, Usage)
assert message.usage.input_tokens > 0
assert message.usage.output_tokens > 0


@pytest.mark.anthropic
def test_anthropic_chat_model_complete_raise_structured_output_error():
chat_model = AnthropicChatModel("claude-3-haiku-20240307")
@@ -100,6 +113,19 @@ async def test_anthropic_chat_model_acomplete(
assert isinstance(message.content, expected_output_type)


@pytest.mark.asyncio
@pytest.mark.anthropic
async def test_anthropic_chat_model_acomplete_usage():
chat_model = AnthropicChatModel("claude-3-haiku-20240307")
message = await chat_model.acomplete(
messages=[UserMessage("Say hello!")], output_types=[AsyncStreamedStr]
)
await message.content.to_string() # Finish the stream
assert isinstance(message.usage, Usage)
assert message.usage.input_tokens > 0
assert message.usage.output_tokens > 0


@pytest.mark.asyncio
@pytest.mark.anthropic
async def test_anthropic_chat_model_acomplete_function_call():
27 changes: 27 additions & 0 deletions tests/chat_model/test_openai_chat_model.py
@@ -10,13 +10,15 @@
FunctionResultMessage,
Message,
SystemMessage,
Usage,
UserMessage,
)
from magentic.chat_model.openai_chat_model import (
OpenaiChatModel,
message_to_openai_message,
)
from magentic.function_call import FunctionCall, ParallelFunctionCall
from magentic.streaming import AsyncStreamedStr, StreamedStr


def plus(a: int, b: int) -> int:
@@ -141,6 +143,18 @@ def test_openai_chat_model_complete_seed():
assert message1.content == message2.content


@pytest.mark.openai
def test_openai_chat_model_complete_usage():
chat_model = OpenaiChatModel("gpt-3.5-turbo")
message = chat_model.complete(
messages=[UserMessage("Say hello!")], output_types=[StreamedStr]
)
str(message.content) # Finish the stream
assert isinstance(message.usage, Usage)
assert message.usage.input_tokens > 0
assert message.usage.output_tokens > 0


@pytest.mark.openai
def test_openai_chat_model_complete_no_structured_output_error():
chat_model = OpenaiChatModel("gpt-3.5-turbo")
@@ -152,3 +166,16 @@ def test_openai_chat_model_complete_no_structured_output_error():
output_types=[int, bool],
)
assert isinstance(message.content, int | bool)


@pytest.mark.asyncio
@pytest.mark.openai
async def test_openai_chat_model_acomplete_usage():
chat_model = OpenaiChatModel("gpt-3.5-turbo")
message = await chat_model.acomplete(
messages=[UserMessage("Say hello!")], output_types=[AsyncStreamedStr]
)
await message.content.to_string() # Finish the stream
assert isinstance(message.usage, Usage)
assert message.usage.input_tokens > 0
assert message.usage.output_tokens > 0
