From c0a2522cf6f30124bd6c6594f109cc846abe444b Mon Sep 17 00:00:00 2001
From: RinZ27 <222222878+RinZ27@users.noreply.github.com>
Date: Sun, 1 Feb 2026 12:58:31 +0700
Subject: [PATCH] Fix UnboundLocalErrors and improve stability in Gemini
 client and ConversableAgent

I noticed several potential crash points where variables like 'ans' and
'arguments' could be used before initialization.

- In gemini.py: Initialized 'ans' and token counters to handle edge cases
  where model names don't match hardcoded strings. Switched to an 'else'
  block for vision models to be more inclusive.
- In conversable_agent.py: Initialized 'arguments' to None in both sync
  and async execute_function to prevent crashes when verbose=True and a
  function is not found.
---
 autogen/agentchat/conversable_agent.py | 2 ++
 autogen/oai/gemini.py                  | 5 ++++-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py
index 1cd21f5efd46..f5907e0e85cb 100644
--- a/autogen/agentchat/conversable_agent.py
+++ b/autogen/agentchat/conversable_agent.py
@@ -2229,6 +2229,7 @@ def execute_function(self, func_call, verbose: bool = False) -> Tuple[bool, Dict
         func = self._function_map.get(func_name, None)

         is_exec_success = False
+        arguments = None
         if func is not None:
             # Extract arguments from a json-like string and put it into a dict.
             input_string = self._format_json_str(func_call.get("arguments", "{}"))
@@ -2288,6 +2289,7 @@ async def a_execute_function(self, func_call):
         func = self._function_map.get(func_name, None)

         is_exec_success = False
+        arguments = None
         if func is not None:
             # Extract arguments from a json-like string and put it into a dict.
             input_string = self._format_json_str(func_call.get("arguments", "{}"))
diff --git a/autogen/oai/gemini.py b/autogen/oai/gemini.py
index 60a2062bb89c..8eeb0791ea96 100644
--- a/autogen/oai/gemini.py
+++ b/autogen/oai/gemini.py
@@ -176,6 +176,9 @@ def create(self, params: Dict) -> ChatCompletion:
         if n_response > 1:
             warnings.warn("Gemini only supports `n=1` for now. We only generate one response.", UserWarning)

+        ans = None
+        prompt_tokens = completion_tokens = 0
+
         if "vision" not in model_name:
             # A. create and call the chat model.
             gemini_messages = self._oai_messages_to_gemini_messages(messages)
@@ -214,7 +217,7 @@ def create(self, params: Dict) -> ChatCompletion:
             prompt_tokens = model.count_tokens(chat.history[:-1]).total_tokens
             completion_tokens = model.count_tokens(ans).total_tokens

-        elif model_name == "gemini-pro-vision":
+        else:
             # B. handle the vision model
             if self.use_vertexai:
                 model = GenerativeModel(