From 8714da2bd5c2930330c600401559b106bc5c47d2 Mon Sep 17 00:00:00 2001
From: Christopher Chou <49086305+BabyChouSr@users.noreply.github.com>
Date: Fri, 30 Aug 2024 20:17:29 -0700
Subject: [PATCH] Fix load test (#3508)

---
 error_log.txt      | 0
 tests/load_test.py | 5 ++---
 2 files changed, 2 insertions(+), 3 deletions(-)
 delete mode 100644 error_log.txt

diff --git a/error_log.txt b/error_log.txt
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/load_test.py b/tests/load_test.py
index a51a8ffff..f7804bcaa 100644
--- a/tests/load_test.py
+++ b/tests/load_test.py
@@ -39,7 +39,6 @@ async def litellm_completion(args, tokenizer, image_url=None):
     itl_list = []
     content = ""
-    start = time.time()
     async for chunk in response:
         if chunk.choices[0].delta.content:
             end_time = time.time()
@@ -101,7 +100,7 @@ async def main(args):
     )
 
     # Write errors to error_log.txt
-    with open("error_log.txt", "a") as error_log:
+    with open("load_test_errors.log", "a") as error_log:
         for completion in all_completions:
             if isinstance(completion, str):
                 error_log.write(completion + "\n")
@@ -125,6 +124,6 @@ async def main(args):
     litellm_client = AsyncOpenAI(base_url=args.server_address, api_key="sk-1234")
 
     # Blank out contents of error_log.txt
-    open("error_log.txt", "w").close()
+    open("load_test_errors.log", "w").close()
 
     asyncio.run(main(args))