From d90d4fe340b55f42ab7c69e0fc1fe08ae71f3e4d Mon Sep 17 00:00:00 2001
From: Tanishq Dubey
Date: Wed, 2 Oct 2024 19:48:25 -0400
Subject: [PATCH] fix empty thoughts as a reply

---
 main.py  | 12 +++++++++++-
 tools.py |  2 +-
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/main.py b/main.py
index 1b23579..990587a 100644
--- a/main.py
+++ b/main.py
@@ -77,6 +77,8 @@ def answer_question_tools(user_input: str, conversation_history: List[dict], max
     emit('thinking', {'step': 'Starting'})
     emit('conversation_history', {'history': conversation_history})
 
+    last_thought_content = None
+
     for _ in range(max_retries):
         response = ollama.chat(model=PRIMARY_MODEL, messages=conversation_history, tools=tool_manager.get_tools_for_ollama_dict(), stream=False)
         assistant_message = response['message']
@@ -105,7 +107,15 @@ def answer_question_tools(user_input: str, conversation_history: List[dict], max
             emit('thought', {'type': 'answer', 'content': reply_answer})
             return reply_answer
         else:
-            emit('thought', {'type': 'thoughts', 'content': assistant_message['content']})
+            current_thought_content = assistant_message['content'].strip()
+            emit('thought', {'type': 'thoughts', 'content': current_thought_content})
+
+            # Check for two consecutive thoughts, with the second being empty
+            if last_thought_content and not current_thought_content:
+                emit('thought', {'type': 'answer', 'content': last_thought_content})
+                return last_thought_content
+
+            last_thought_content = current_thought_content
             continue
 
     return f"Max iterations reached. Last response: {assistant_message['content']}"
diff --git a/tools.py b/tools.py
index e6deec0..d3730fa 100644
--- a/tools.py
+++ b/tools.py
@@ -120,4 +120,4 @@ class PythonCodeTool(Tool):
         except Exception as e:
             return f"Error executing code: {str(e)}"
 
-        return '\n'.join([f"{k}: {v}" for k, v in result.items()])
\ No newline at end of file
+        return '\n'.join([f"{k}:\n{v}" for k, v in result.items()])
\ No newline at end of file
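
Note (not part of the patch): below is a minimal, self-contained sketch of the rule the main.py hunk introduces, for reviewers who want to see the behaviour in isolation. The emit() stub, answer_from_thoughts(), and the canned message list are hypothetical stand-ins for the Socket.IO emit and the ollama.chat loop inside answer_question_tools.

# Sketch of the early-return rule added by this patch: if the model produces an
# empty "thought" immediately after a non-empty one, the previous thought is
# promoted to the final answer instead of looping until max_retries.
from typing import List


def emit(event: str, payload: dict) -> None:
    # Stand-in for the real emit; just prints what would be sent to the client.
    print(f"{event}: {payload}")


def answer_from_thoughts(messages: List[dict], max_retries: int = 5) -> str:
    last_thought_content = None
    assistant_message = {'content': ''}
    for assistant_message in messages[:max_retries]:
        current_thought_content = assistant_message['content'].strip()
        emit('thought', {'type': 'thoughts', 'content': current_thought_content})

        # Two consecutive thoughts with the second empty -> treat the first as final.
        if last_thought_content and not current_thought_content:
            emit('thought', {'type': 'answer', 'content': last_thought_content})
            return last_thought_content

        last_thought_content = current_thought_content
    return f"Max iterations reached. Last response: {assistant_message['content']}"


if __name__ == "__main__":
    # The second message is blank, so the first thought is returned as the reply.
    print(answer_from_thoughts([
        {'content': 'The capital of France is Paris.'},
        {'content': '   '},
    ]))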