diff --git a/src/chat/message_manager/message_manager.py b/src/chat/message_manager/message_manager.py
index 2633eeb71..19579dae4 100644
--- a/src/chat/message_manager/message_manager.py
+++ b/src/chat/message_manager/message_manager.py
@@ -365,6 +365,11 @@ class MessageManager:
         if not global_config.chat.interruption_enabled or not chat_stream or not message:
             return
 
+        # 检查是否正在回复
+        if chat_stream.context_manager.context.is_replying:
+            logger.info(f"聊天流 {chat_stream.stream_id} 正在回复中,跳过打断检查")
+            return
+
         # 检查是否为表情包消息
         if message.is_picid or message.is_emoji:
             logger.info(f"消息 {message.message_id} 是表情包或Emoji,跳过打断检查")
diff --git a/src/chat/replyer/default_generator.py b/src/chat/replyer/default_generator.py
index 3a4273265..211e345da 100644
--- a/src/chat/replyer/default_generator.py
+++ b/src/chat/replyer/default_generator.py
@@ -330,6 +330,8 @@ class DefaultReplyer:
         model_name = "unknown_model"
 
         try:
+            # 设置正在回复的状态
+            self.chat_stream.context_manager.context.is_replying = True
             content, reasoning_content, model_name, tool_call = await self.llm_generate_content(prompt)
             logger.debug(f"replyer生成内容: {content}")
             llm_response = {
@@ -338,6 +340,15 @@ class DefaultReplyer:
                 "model": model_name,
                 "tool_calls": tool_call,
             }
+        except UserWarning as e:
+            raise e
+        except Exception as llm_e:
+            # 精简报错信息
+            logger.error(f"LLM 生成失败: {llm_e}")
+            return False, None, prompt, selected_expressions  # LLM 调用失败则无法生成回复
+        finally:
+            # 重置正在回复的状态
+            self.chat_stream.context_manager.context.is_replying = False
 
             # 触发 AFTER_LLM 事件
             if not from_plugin:
@@ -352,12 +363,6 @@ class DefaultReplyer:
                     raise UserWarning(
                         f"插件{result.get_summary().get('stopped_handlers', '')}于请求后取消了内容生成"
                     )
-        except UserWarning as e:
-            raise e
-        except Exception as llm_e:
-            # 精简报错信息
-            logger.error(f"LLM 生成失败: {llm_e}")
-            return False, None, prompt, selected_expressions  # LLM 调用失败则无法生成回复
 
         # 回复生成成功后,异步存储聊天记忆(不阻塞返回)
         try:
diff --git a/src/common/data_models/message_manager_data_model.py b/src/common/data_models/message_manager_data_model.py
index 060be410c..203d5e82c 100644
--- a/src/common/data_models/message_manager_data_model.py
+++ b/src/common/data_models/message_manager_data_model.py
@@ -52,6 +52,7 @@ class StreamContext(BaseDataModel):
     priority_mode: str | None = None
     priority_info: dict | None = None
     triggering_user_id: str | None = None  # 触发当前聊天流的用户ID
+    is_replying: bool = False  # 是否正在生成回复
 
     def add_action_to_message(self, message_id: str, action: str):
         """