From d52afe4398bea33491b0986c86c6ea834b6888ff Mon Sep 17 00:00:00 2001
From: Marcos Venicius <94018427+marcos-venicius@users.noreply.github.com>
Date: Fri, 19 Jan 2024 13:51:40 -0300
Subject: [PATCH 1/2] Update conversable_agent.py

When using `gpt-3.5-turbo-1106` I got the error below:

```console
User_Proxy (to chat_manager):

Find a latest paper about gpt-4 on arxiv and find its potential applications in software

--------------------------------------------------------------------------------

>>>>>>>> USING AUTO REPLY...
Traceback (most recent call last):
  File "/home/marcos_souza/Projects/auto-gen/2/./main.py", line 46, in <module>
    user_proxy.initiate_chat(
  File "/home/marcos_souza/.local/lib/python3.10/site-packages/flaml/autogen/agentchat/conversable_agent.py", line 521, in initiate_chat
    self.send(self.generate_init_message(**context), recipient, silent=silent)
  File "/home/marcos_souza/.local/lib/python3.10/site-packages/flaml/autogen/agentchat/conversable_agent.py", line 324, in send
    recipient.receive(message, self, request_reply, silent)
  File "/home/marcos_souza/.local/lib/python3.10/site-packages/flaml/autogen/agentchat/conversable_agent.py", line 452, in receive
    reply = self.generate_reply(messages=self.chat_messages[sender], sender=sender)
  File "/home/marcos_souza/.local/lib/python3.10/site-packages/flaml/autogen/agentchat/conversable_agent.py", line 767, in generate_reply
    final, reply = reply_func(self, messages=messages, sender=sender, config=reply_func_tuple["config"])
  File "/home/marcos_souza/.local/lib/python3.10/site-packages/flaml/autogen/agentchat/groupchat.py", line 118, in run_chat
    reply = speaker.generate_reply(sender=self)
  File "/home/marcos_souza/.local/lib/python3.10/site-packages/flaml/autogen/agentchat/conversable_agent.py", line 767, in generate_reply
    final, reply = reply_func(self, messages=messages, sender=sender, config=reply_func_tuple["config"])
  File "/home/marcos_souza/.local/lib/python3.10/site-packages/flaml/autogen/agentchat/conversable_agent.py", line 635, in generate_code_execution_reply
    return True, f"exitcode: {exitcode} ({exitcode2str})\nCode output: {logs}"
UnboundLocalError: local variable 'exitcode' referenced before assignment
```

This happens because `logs`, `exitcode`, and `exitcode2str` have no initial values: when there are no messages to scan, no code is executed, so the three variables are never assigned and the return statement above raises `UnboundLocalError`.
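For context, this is the standard Python pitfall of returning variables that are only assigned inside a loop body. A minimal sketch of the failure mode (a hypothetical `reply` helper, not the actual autogen code):

```python
# Minimal sketch of the bug pattern (hypothetical helper, not the autogen code):
# with an empty message list the loop body never runs, so `exitcode`,
# `exitcode2str`, and `logs` are never bound before the return statement.
def reply(messages):
    for message in messages:
        exitcode, logs = 0, f"ran: {message}"
        exitcode2str = "execution succeeded"
    return f"exitcode: {exitcode} ({exitcode2str})\nCode output: {logs}"


reply([])  # UnboundLocalError: local variable 'exitcode' referenced before assignment
```

Giving the three variables defaults before the loop, as the diff below does, makes the empty-message path return the default success string instead of crashing.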
---
 flaml/autogen/agentchat/conversable_agent.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/flaml/autogen/agentchat/conversable_agent.py b/flaml/autogen/agentchat/conversable_agent.py
index 813634119c..ec598edde0 100644
--- a/flaml/autogen/agentchat/conversable_agent.py
+++ b/flaml/autogen/agentchat/conversable_agent.py
@@ -611,6 +611,11 @@ def generate_code_execution_reply(
         if messages is None:
             messages = self._oai_messages[sender]
         last_n_messages = code_execution_config.pop("last_n_messages", 1)
+
+        logs = ""
+        exitcode = 0
+        exitcode2str = "execution succeeded"
+
         for i in range(min(len(messages), last_n_messages)):
             message = messages[-(i + 1)]
             code_blocks = extract_code(message["content"])

From 8e9ade2f73a41679c6bf565b5fed9669c4a0b385 Mon Sep 17 00:00:00 2001
From: Marcos Venicius
Date: Fri, 19 Jan 2024 14:13:28 -0300
Subject: [PATCH 2/2] implement tests to check when the default values are
 returned from generate_code_execution_reply

---
 test/autogen/agentchat/test_conversable_agent.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/test/autogen/agentchat/test_conversable_agent.py b/test/autogen/agentchat/test_conversable_agent.py
index 23f4a223c2..521fcb6553 100644
--- a/test/autogen/agentchat/test_conversable_agent.py
+++ b/test/autogen/agentchat/test_conversable_agent.py
@@ -2,6 +2,17 @@
 from flaml.autogen.agentchat import ConversableAgent
 
 
+def test_generate_oai_reply_empty_messages():
+    agent = ConversableAgent(
+        "a0", max_consecutive_auto_reply=0,
+        llm_config=False, human_input_mode="NEVER"
+    )
+
+    _, output = agent.generate_code_execution_reply(messages=[], config={})
+
+    assert output == 'exitcode: 0 (execution succeeded)\nCode output: '
+
+
 def test_trigger():
     agent = ConversableAgent("a0", max_consecutive_auto_reply=0, llm_config=False, human_input_mode="NEVER")
     agent1 = ConversableAgent("a1", max_consecutive_auto_reply=0, human_input_mode="NEVER")
@@ -177,6 +188,7 @@ def add_num(num_to_be_added):
 
 if __name__ == "__main__":
     test_trigger()
+    test_generate_oai_reply_empty_messages()
     # test_context()
     # test_max_consecutive_auto_reply()
     # test_conversable_agent(pytest.monkeypatch)
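As a quick sanity check (assuming the repository's usual pytest setup), the new test can be run on its own by node id, or the test module can be executed directly since its `__main__` block now calls it:

```console
pytest test/autogen/agentchat/test_conversable_agent.py::test_generate_oai_reply_empty_messages
python test/autogen/agentchat/test_conversable_agent.py
```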