From 9b9dbbd74da4d6ddcd3c111b98d762ffa83685da Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=98=A5=E6=B2=B3=E6=99=B4?=
Date: Wed, 23 Apr 2025 15:18:01 +0900
Subject: [PATCH] fix: Ensure explicit returns and correct async calls
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Adds explicit `return None` statements in several functions across
different modules, making previously implicit fall-through None returns
explicit and improving code clarity.

Corrects an async call in `Heartflow.deactivate_chat` by adding `await`.

Updates type hints in `heartflow_prompt_builder.py`.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude
---
 import_openie.py                                      | 1 +
 src/heart_flow/heartflow.py                           | 4 +++-
 src/plugins/heartFC_chat/heartflow_prompt_builder.py  | 5 +++--
 src/plugins/models/utils_model.py                     | 2 +-
 4 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/import_openie.py b/import_openie.py
index 5e347ef53..43fcd21fb 100644
--- a/import_openie.py
+++ b/import_openie.py
@@ -156,6 +156,7 @@ def main():
         if handle_import_openie(openie_data, embed_manager, kg_manager) is False:
             logger.error("处理OpenIE数据时发生错误")
             return False
+    return None


 if __name__ == "__main__":
diff --git a/src/heart_flow/heartflow.py b/src/heart_flow/heartflow.py
index 9fa0211fd..5a7bb30c1 100644
--- a/src/heart_flow/heartflow.py
+++ b/src/heart_flow/heartflow.py
@@ -97,6 +97,7 @@ class MaiState(enum.Enum):
             return 3
         elif self == MaiState.FOCUSED_CHAT:
             return 2
+        return None

     def get_focused_chat_max_num(self):
         if self == MaiState.OFFLINE:
@@ -107,6 +108,7 @@ class MaiState(enum.Enum):
             return 1
         elif self == MaiState.FOCUSED_CHAT:
             return 2
+        return None


 class MaiStateInfo:
@@ -879,7 +881,7 @@ class Heartflow:
             if subflow.chat_state.chat_status != ChatState.ABSENT:
                 logger.debug(f"[Heartflow Deactivate] 正在将子心流 {stream_name} 状态设置为 ABSENT。")
                 # 调用 set_chat_state,它会处理日志和状态更新
-                subflow.set_chat_state(ChatState.ABSENT)
+                await subflow.set_chat_state(ChatState.ABSENT)
                 deactivated_count += 1
             else:
                 # 如果已经是 ABSENT,则无需再次设置,但记录一下检查
diff --git a/src/plugins/heartFC_chat/heartflow_prompt_builder.py b/src/plugins/heartFC_chat/heartflow_prompt_builder.py
index db7023dd0..7b3595cee 100644
--- a/src/plugins/heartFC_chat/heartflow_prompt_builder.py
+++ b/src/plugins/heartFC_chat/heartflow_prompt_builder.py
@@ -7,7 +7,7 @@ from src.plugins.utils.chat_message_builder import build_readable_messages, get_
 from src.plugins.person_info.relationship_manager import relationship_manager
 from src.plugins.chat.utils import get_embedding, parse_text_timestamps
 import time
-from typing import Union
+from typing import Union, Optional
 from ...common.database import db
 from ..chat.utils import get_recent_group_speaker
 from ..moods.moods import MoodManager
@@ -80,12 +80,13 @@ class PromptBuilder:

     async def build_prompt(
         self, build_mode, reason, current_mind_info, message_txt: str, sender_name: str = "某人", chat_stream=None
-    ) -> tuple[str, str]:
+    ) -> Optional[tuple[str, str]]:

         if build_mode == "normal":
             return await self._build_prompt_normal(chat_stream, message_txt, sender_name)
         elif build_mode == "focus":
             return await self._build_prompt_focus(reason, current_mind_info, chat_stream, message_txt, sender_name)
+        return None

     async def _build_prompt_focus(
         self, reason, current_mind_info, chat_stream, message_txt: str, sender_name: str = "某人"
diff --git a/src/plugins/models/utils_model.py b/src/plugins/models/utils_model.py
index 365b15a60..e2ec7ac3d 100644
--- a/src/plugins/models/utils_model.py
+++ b/src/plugins/models/utils_model.py
@@ -689,7 +689,7 @@ class LLMRequest:
             stream_mode = request_content["stream_mode"]
             if response.status in policy["retry_codes"] or response.status in policy["abort_codes"]:
                 await self._handle_error_response(response, retry_count, policy)
-                return
+                return None
             response.raise_for_status()

             result = {}
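
Editor's note (illustrative only, not part of the patch to apply): the sketch
below shows the two behaviours this patch addresses, using hypothetical names
(DemoState, DemoSubflow, set_state) rather than anything from this repository.
A function whose if/elif chain matches no branch falls through and returns
None implicitly, and calling a coroutine method without `await` only creates a
coroutine object that never runs.

import asyncio
import enum


class DemoState(enum.Enum):
    OFFLINE = "offline"
    CHATTING = "chatting"
    FOCUSED = "focused"

    def max_sessions(self):
        # Without the trailing `return None`, a state that matches no branch
        # falls through and returns None implicitly; the explicit return makes
        # that code path visible, mirroring the enum changes in this patch.
        if self == DemoState.OFFLINE:
            return 0
        elif self == DemoState.CHATTING:
            return 3
        return None


class DemoSubflow:
    def __init__(self) -> None:
        self.state = "ACTIVE"

    async def set_state(self, new_state: str) -> None:
        self.state = new_state


async def main() -> None:
    subflow = DemoSubflow()

    # Bug pattern fixed by the `await` change: calling a coroutine function
    # without `await` only creates a coroutine object; its body never runs and
    # Python emits a "coroutine ... was never awaited" RuntimeWarning.
    coro = subflow.set_state("ABSENT")
    print(subflow.state)  # still "ACTIVE"
    coro.close()          # close the unawaited coroutine to silence the warning

    # Corrected pattern: awaiting the call actually executes it.
    await subflow.set_state("ABSENT")
    print(subflow.state)  # "ABSENT"

    print(DemoState.FOCUSED.max_sessions())  # None, now returned explicitly


if __name__ == "__main__":
    asyncio.run(main())

The same reasoning motivates the annotation change: once a function can return
None on a fall-through path, a return type such as Optional[tuple[str, str]]
states that possibility for callers and type checkers.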