feat: separate sender instance per heartflow; it starts and runs, but whether it actually works is still unverified

Going to sleep.
SengokuCola
2025-04-23 00:41:46 +08:00
parent bcf295905e
commit 2eecd746af
14 changed files with 568 additions and 672 deletions

View File

@@ -7,7 +7,6 @@ from src.plugins.chat.message import MessageRecv, BaseMessageInfo, MessageThinki
from src.plugins.chat.message import MessageSet, Seg # Local import needed after move
from src.plugins.chat.chat_stream import ChatStream
from src.plugins.chat.message import UserInfo
from src.heart_flow.heartflow import heartflow, SubHeartflow
from src.plugins.chat.chat_stream import chat_manager
from src.common.logger import get_module_logger, LogConfig, PFC_STYLE_CONFIG # 引入 DEFAULT_CONFIG
from src.plugins.models.utils_model import LLMRequest
@@ -18,7 +17,7 @@ from src.plugins.utils.timer_calculater import Timer # <--- Import Timer
from .heartFC_generator import ResponseGenerator # Assuming this is the type for gpt
from src.do_tool.tool_use import ToolUser
from src.plugins.chat.emoji_manager import EmojiManager # Assuming this is the type
from .heartflow_message_sender import MessageManager # Assuming this is the type
from ..chat.message_sender import message_manager # <-- Import the global manager
# --- End import ---
@@ -37,7 +36,8 @@ logger = get_module_logger("HeartFCLoop", config=interest_log_config) # Logger
if TYPE_CHECKING:
# Keep this if HeartFCController methods are still needed elsewhere,
# but the instance variable will be removed from HeartFChatting
from .heartFC_controler import HeartFCController
# from .heartFC_controler import HeartFCController
from src.heart_flow.heartflow import SubHeartflow, heartflow # <-- 同时导入 heartflow 实例用于类型检查
PLANNER_TOOL_DEFINITION = [
{
@@ -76,54 +76,51 @@ class HeartFChatting:
def __init__(self,
chat_id: str,
# --- Explicit Dependencies ---
gpt_instance: ResponseGenerator,
tool_user_instance: ToolUser,
emoji_manager_instance: EmojiManager,
message_manager_instance: MessageManager
# --- End Explicit Dependencies ---
# 显式依赖注入
gpt_instance: ResponseGenerator, # 文本回复生成器
tool_user_instance: ToolUser, # 工具使用实例
emoji_manager_instance: EmojiManager, # 表情管理实例
):
"""
初始化HeartFChatting实例。
Args:
chat_id: The identifier for the chat stream (e.g., stream_id).
gpt_instance: The ResponseGenerator instance for generating text replies.
tool_user_instance: The ToolUser instance for using tools.
emoji_manager_instance: The EmojiManager instance for handling emojis.
message_manager_instance: The MessageManager instance for sending/managing messages.
HeartFChatting 初始化函数
参数:
chat_id: 聊天流唯一标识符(如stream_id)
gpt_instance: 文本回复生成器实例
tool_user_instance: 工具使用实例
emoji_manager_instance: 表情管理实例
"""
self.stream_id: str = chat_id
self.chat_stream: Optional[ChatStream] = None
self.sub_hf: Optional[SubHeartflow] = None
self._initialized = False
self._init_lock = asyncio.Lock() # Ensure initialization happens only once
self._processing_lock = asyncio.Lock() # 确保只有一个 Plan-Replier-Sender 周期在运行
self._timer_lock = asyncio.Lock() # 用于安全更新计时器
# 基础属性
self.stream_id: str = chat_id # 聊天流ID
self.chat_stream: Optional[ChatStream] = None # 关联的聊天流
self.sub_hf: Optional[SubHeartflow] = None # 关联的子心流
# 初始化状态控制
self._initialized = False # 是否已初始化标志
self._init_lock = asyncio.Lock() # 初始化锁(确保只初始化一次)
self._processing_lock = asyncio.Lock() # 处理锁(确保单次Plan-Replier-Sender周期)
self._timer_lock = asyncio.Lock() # 计时器锁(安全更新计时器)
# --- Store Dependencies ---
self.gpt_instance = gpt_instance
self.tool_user = tool_user_instance
self.emoji_manager = emoji_manager_instance
self.message_manager = message_manager_instance
# --- End Store Dependencies ---
# 依赖注入存储
self.gpt_instance = gpt_instance # 文本回复生成器
self.tool_user = tool_user_instance # 工具使用实例
self.emoji_manager = emoji_manager_instance # 表情管理实例
# Access LLM config through global_config or pass if needed
# LLM规划器配置
self.planner_llm = LLMRequest(
model=global_config.llm_normal,
temperature=global_config.llm_normal["temp"],
max_tokens=1000,
request_type="action_planning",
request_type="action_planning", # 用于动作规划
)
# Internal state for loop control
self._loop_timer: float = 0.0 # Remaining time for the loop in seconds
self._loop_active: bool = False # Is the loop currently running?
self._loop_task: Optional[asyncio.Task] = None # Stores the main loop task
self._trigger_count_this_activation: int = 0 # Counts triggers within an active period
# 循环控制内部状态
self._loop_timer: float = 0.0 # 循环剩余时间(秒)
self._loop_active: bool = False # 循环是否正在运行
self._loop_task: Optional[asyncio.Task] = None # 主循环任务
self._trigger_count_this_activation: int = 0 # 当前激活周期内的触发计数
self._initial_duration: float = INITIAL_DURATION # 首次触发增加的时间
self._last_added_duration: float = self._initial_duration # <--- 新增:存储上次增加的时间
self._last_added_duration: float = self._initial_duration # 上次增加的时间
def _get_log_prefix(self) -> str:
"""获取日志前缀,包含可读的流名称"""
@@ -146,6 +143,8 @@ class HeartFChatting:
logger.error(f"{log_prefix} 获取ChatStream失败。")
return False
# <-- 在这里导入 heartflow 实例
from src.heart_flow.heartflow import heartflow
self.sub_hf = heartflow.get_subheartflow(self.stream_id)
if not self.sub_hf:
logger.warning(f"{log_prefix} 获取SubHeartflow失败。一些功能可能受限。")
@@ -245,7 +244,7 @@ class HeartFChatting:
cycle_timers = {} # <--- Initialize timers dict for this cycle
# Access MessageManager directly
if self.message_manager.check_if_sending_message_exist(self.stream_id, thinking_id):
if message_manager.check_if_sending_message_exist(self.stream_id, thinking_id):
# logger.info(f"{log_prefix} HeartFChatting: 11111111111111111111111111111111麦麦还在发消息等会再规划")
await asyncio.sleep(1)
continue
@@ -318,7 +317,7 @@ class HeartFChatting:
)
except Exception as e_replier:
logger.error(f"{log_prefix} 循环: 回复器工作失败: {e_replier}")
self._cleanup_thinking_message(thinking_id)
# self._cleanup_thinking_message(thinking_id) <-- Remove cleanup call
if replier_result:
# --- Sender Work --- #
@@ -334,10 +333,10 @@ class HeartFChatting:
except Exception as e_sender:
logger.error(f"{log_prefix} 循环: 发送器失败: {e_sender}")
# _sender should handle cleanup, but double check
# self._cleanup_thinking_message(thinking_id)
# self._cleanup_thinking_message(thinking_id) <-- Remove cleanup call
else:
logger.warning(f"{log_prefix} 循环: 回复器未产生结果. 跳过发送.")
self._cleanup_thinking_message(thinking_id)
# self._cleanup_thinking_message(thinking_id) <-- Remove cleanup call
elif action == "emoji_reply":
logger.info(
f"{log_prefix} HeartFChatting: 麦麦决定回复表情 ('{emoji_query}'). 理由: {reasoning}"
@@ -652,16 +651,25 @@ class HeartFChatting:
logger.error(traceback.format_exc())
return None
def _cleanup_thinking_message(self, thinking_id: str):
"""Safely removes the thinking message."""
log_prefix = self._get_log_prefix()
try:
# Access MessageManager directly
container = self.message_manager.get_container(self.stream_id)
container.remove_message(thinking_id, msg_type=MessageThinking)
logger.debug(f"{log_prefix} Cleaned up thinking message {thinking_id}.")
except Exception as e:
logger.error(f"{log_prefix} Error cleaning up thinking message {thinking_id}: {e}")
# def _cleanup_thinking_message(self, thinking_id: str):
# """Safely removes the thinking message."""
# log_prefix = self._get_log_prefix()
# try:
# # Access MessageManager directly
# container = await message_manager.get_container(self.stream_id)
# # container.remove_message(thinking_id, msg_type=MessageThinking) # Need to find the message object first
# found_msg = None
# for msg in container.get_all_messages():
# if isinstance(msg, MessageThinking) and msg.message_info.message_id == thinking_id:
# found_msg = msg
# break
# if found_msg:
# container.remove_message(found_msg)
# logger.debug(f"{log_prefix} Cleaned up thinking message {thinking_id}.")
# else:
# logger.warning(f"{log_prefix} Could not find thinking message {thinking_id} to cleanup.")
# except Exception as e:
# logger.error(f"{log_prefix} Error cleaning up thinking message {thinking_id}: {e}")
# --- 发送器 (Sender) --- #
async def _sender(
@@ -774,10 +782,10 @@ class HeartFChatting:
# Ensure generate_response has access to current_mind if it's crucial context
# Access gpt_instance directly
response_set = await self.gpt_instance.generate_response(
self.sub_hf,
reason,
anchor_message, # Pass anchor_message positionally (matches 'message' parameter)
thinking_id, # Pass thinking_id positionally
current_mind_info=self.sub_hf.current_mind,
reason=reason,
message=anchor_message, # Pass anchor_message positionally (matches 'message' parameter)
thinking_id=thinking_id, # Pass thinking_id positionally
)
if not response_set:
@@ -818,7 +826,7 @@ class HeartFChatting:
thinking_start_time=thinking_time_point,
)
# Access MessageManager directly
self.message_manager.add_message(thinking_message)
await message_manager.add_message(thinking_message)
return thinking_id
async def _send_response_messages(
@@ -831,7 +839,7 @@ class HeartFChatting:
chat = anchor_message.chat_stream
# Access MessageManager directly
container = self.message_manager.get_container(chat.stream_id)
container = await message_manager.get_container(chat.stream_id)
thinking_message = None
# 移除思考消息
@@ -875,7 +883,7 @@ class HeartFChatting:
message_set.add_message(bot_message)
# Access MessageManager directly
self.message_manager.add_message(message_set)
await message_manager.add_message(message_set)
return first_bot_msg
async def _handle_emoji(self, anchor_message: Optional[MessageRecv], response_set: List[str], send_emoji: str = ""):
@@ -917,4 +925,4 @@ class HeartFChatting:
is_emoji=True,
)
# Access MessageManager directly
self.message_manager.add_message(bot_message)
await message_manager.add_message(bot_message)
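Across this file the injected self.message_manager is replaced by the module-level message_manager imported from src.plugins.chat.message_sender, and get_container/add_message are now awaited at every call site, so they are presumably coroutines in the new module. That module is not part of this diff, so the following is only a minimal sketch of the pattern the call sites assume; the method names mirror the hunks above, everything else is an assumption:

# Minimal sketch -- not the project's real src/plugins/chat/message_sender.py
import asyncio
from typing import Dict, List

class MessageContainer:
    def __init__(self, chat_id: str):
        self.chat_id = chat_id
        self.messages: List[object] = []

    def get_all_messages(self) -> List[object]:
        return list(self.messages)

class SharedMessageManager:
    def __init__(self):
        self._containers: Dict[str, MessageContainer] = {}
        self._lock = asyncio.Lock()

    async def get_container(self, chat_id: str) -> MessageContainer:
        async with self._lock:            # the container map is shared by all chats
            return self._containers.setdefault(chat_id, MessageContainer(chat_id))

    async def add_message(self, message) -> None:
        container = await self.get_container(message.chat_stream.stream_id)
        container.messages.append(message)

message_manager = SharedMessageManager()  # single shared instance, imported everywhere

Call sites then look exactly like the new lines above, e.g. `await message_manager.add_message(message_set)`.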

View File

@@ -11,7 +11,6 @@ from src.plugins.respon_info_catcher.info_catcher import info_catcher_manager
from ..utils.timer_calculater import Timer
from src.plugins.moods.moods import MoodManager
from src.heart_flow.sub_heartflow import SubHeartflow
# 定义日志配置
llm_config = LogConfig(
# 使用消息发送专用样式
@@ -39,7 +38,7 @@ class ResponseGenerator:
async def generate_response(
self,
sub_hf: SubHeartflow,
current_mind_info: str,
reason: str,
message: MessageRecv,
thinking_id: str,
@@ -56,7 +55,7 @@ class ResponseGenerator:
current_model = self.model_normal
current_model.temperature = global_config.llm_normal["temp"] * arousal_multiplier # 激活度越高,温度越高
model_response = await self._generate_response_with_model(
sub_hf, reason, message, current_model, thinking_id
current_mind_info, reason, message, current_model, thinking_id
)
if model_response:
@@ -71,7 +70,7 @@ class ResponseGenerator:
return None
async def _generate_response_with_model(
self, sub_hf: SubHeartflow, reason: str, message: MessageRecv, model: LLMRequest, thinking_id: str
self, current_mind_info: str, reason: str, message: MessageRecv, model: LLMRequest, thinking_id: str
) -> str:
sender_name = ""
@@ -84,9 +83,10 @@ class ResponseGenerator:
prompt = await prompt_builder.build_prompt(
build_mode="focus",
reason=reason,
current_mind_info=current_mind_info,
message_txt=message.processed_plain_text,
sender_name=sender_name,
subheartflow=sub_hf
chat_stream=message.chat_stream
)
logger.info(f"构建prompt时间: {t_build_prompt.human_readable}")

View File

@@ -1,241 +0,0 @@
import asyncio
import time
from typing import Dict, List, Optional, Union
from src.common.logger import get_module_logger
from ..message.api import global_api
from ..chat.message import MessageSending, MessageThinking, MessageSet
from ..storage.storage import MessageStorage
from ...config.config import global_config
from ..chat.utils import truncate_message, calculate_typing_time, count_messages_between
from src.common.logger import LogConfig, SENDER_STYLE_CONFIG
# 定义日志配置
sender_config = LogConfig(
# 使用消息发送专用样式
console_format=SENDER_STYLE_CONFIG["console_format"],
file_format=SENDER_STYLE_CONFIG["file_format"],
)
logger = get_module_logger("msg_sender", config=sender_config)
class MessageSender:
"""发送器"""
_instance = None
def __new__(cls, *args, **kwargs):
if cls._instance is None:
cls._instance = super(MessageSender, cls).__new__(cls, *args, **kwargs)
return cls._instance
def __init__(self):
# 确保 __init__ 只被调用一次
if not hasattr(self, "_initialized"):
self.message_interval = (0.5, 1) # 消息间隔时间范围(秒)
self.last_send_time = 0
self._current_bot = None
self._initialized = True
def set_bot(self, bot):
"""设置当前bot实例"""
pass
async def send_via_ws(self, message: MessageSending) -> None:
try:
await global_api.send_message(message)
except Exception as e:
raise ValueError(f"未找到平台:{message.message_info.platform} 的url配置请检查配置文件") from e
async def send_message(
self,
message: MessageSending,
) -> None:
"""发送消息"""
message_json = message.to_dict()
message_preview = truncate_message(message.processed_plain_text)
try:
end_point = global_config.api_urls.get(message.message_info.platform, None)
if end_point:
try:
await global_api.send_message_rest(end_point, message_json)
except Exception as e:
logger.error(f"REST方式发送失败出现错误: {str(e)}")
logger.info("尝试使用ws发送")
await self.send_via_ws(message)
else:
await self.send_via_ws(message)
logger.success(f"发送消息 {message_preview} 成功")
except Exception as e:
logger.error(f"发送消息 {message_preview} 失败: {str(e)}")
class MessageContainer:
"""单个聊天流的发送/思考消息容器"""
def __init__(self, chat_id: str, max_size: int = 100):
self.chat_id = chat_id
self.max_size = max_size
self.messages = []
self.last_send_time = 0
def count_thinking_messages(self) -> int:
"""计算当前容器中思考消息的数量"""
return sum(1 for msg in self.messages if isinstance(msg, MessageThinking))
def get_earliest_message(self) -> Optional[Union[MessageThinking, MessageSending]]:
"""获取thinking_start_time最早的消息对象"""
if not self.messages:
return None
earliest_time = float("inf")
earliest_message = None
for msg in self.messages:
msg_time = msg.thinking_start_time
if msg_time < earliest_time:
earliest_time = msg_time
earliest_message = msg
return earliest_message
def add_message(self, message: Union[MessageThinking, MessageSending]) -> None:
"""添加消息到队列"""
if isinstance(message, MessageSet):
for single_message in message.messages:
self.messages.append(single_message)
else:
self.messages.append(message)
def remove_message(self, message: Union[MessageThinking, MessageSending]) -> bool:
"""移除消息如果消息存在则返回True否则返回False"""
try:
if message in self.messages:
self.messages.remove(message)
return True
return False
except Exception:
logger.exception("移除消息时发生错误")
return False
def has_messages(self) -> bool:
"""检查是否有待发送的消息"""
return bool(self.messages)
def get_all_messages(self) -> List[Union[MessageSending, MessageThinking]]:
"""获取所有消息"""
return list(self.messages)
class MessageManager:
"""管理所有聊天流的消息容器"""
_instance = None
_lock = asyncio.Lock()
def __init__(self):
if MessageManager._instance is not None:
raise Exception("This class is a singleton!")
else:
self.containers: Dict[str, MessageContainer] = {}
self._container_lock = asyncio.Lock()
self.running = True
MessageManager._instance = self
async def start(self):
"""Starts the background processor task."""
asyncio.create_task(self.start_processor())
logger.info("MessageManager processor task started.")
def get_container(self, chat_id: str) -> MessageContainer:
"""获取或创建聊天流的消息容器"""
if chat_id not in self.containers:
self.containers[chat_id] = MessageContainer(chat_id)
return self.containers[chat_id]
def add_message(self, message: Union[MessageThinking, MessageSending, MessageSet]) -> None:
chat_stream = message.chat_stream
if not chat_stream:
raise ValueError("无法找到对应的聊天流")
container = self.get_container(chat_stream.stream_id)
container.add_message(message)
def check_if_sending_message_exist(self, chat_id, thinking_id):
"""检查指定聊天流的容器中是否存在具有特定 thinking_id 的 MessageSending 消息"""
container = self.get_container(chat_id)
if container.has_messages():
for message in container.get_all_messages():
# 首先确保是 MessageSending 类型
if isinstance(message, MessageSending):
# 然后再访问 message_info.message_id
# 检查 message_id 是否匹配 thinking_id 或以 "me" 开头
if message.message_info.message_id == thinking_id or message.message_info.message_id[:2] == "me":
# print(f"检查到存在相同thinking_id的消息: {message.message_info.message_id}???{thinking_id}")
return True
return False
async def process_chat_messages(self, chat_id: str):
"""处理聊天流消息"""
container = self.get_container(chat_id)
if container.has_messages():
# print(f"处理有message的容器chat_id: {chat_id}")
message_earliest = container.get_earliest_message()
if isinstance(message_earliest, MessageThinking):
"""取得了思考消息"""
message_earliest.update_thinking_time()
thinking_time = message_earliest.thinking_time
# print(thinking_time)
print(
f"消息正在思考中,已思考{int(thinking_time)}\r",
end="",
flush=True,
)
# 检查是否超时
if thinking_time > global_config.thinking_timeout:
logger.warning(f"消息思考超时({thinking_time}秒),移除该消息")
container.remove_message(message_earliest)
else:
"""取得了发送消息"""
thinking_time = message_earliest.update_thinking_time()
thinking_start_time = message_earliest.thinking_start_time
now_time = time.time()
thinking_messages_count, thinking_messages_length = count_messages_between(
start_time=thinking_start_time, end_time=now_time, stream_id=message_earliest.chat_stream.stream_id
)
await message_earliest.process()
# 获取 MessageSender 的单例实例并发送消息
typing_time = calculate_typing_time(
input_string=message_earliest.processed_plain_text,
thinking_start_time=message_earliest.thinking_start_time,
is_emoji=message_earliest.is_emoji,
)
await asyncio.sleep(typing_time)
await MessageSender().send_message(message_earliest)
await self.storage.store_message(message_earliest, message_earliest.chat_stream)
container.remove_message(message_earliest)
async def start_processor(self):
"""启动消息处理器"""
while self.running:
await asyncio.sleep(1)
tasks = []
for chat_id in list(self.containers.keys()): # 使用 list 复制 key防止在迭代时修改字典
tasks.append(self.process_chat_messages(chat_id))
if tasks: # 仅在有任务时执行 gather
await asyncio.gather(*tasks)
# # 创建全局消息管理器实例 # 已改为单例模式
# message_manager = MessageManager()
# # 创建全局发送器实例 # 已改为单例模式
# message_sender = MessageSender()
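The file removed above was the heartflow-local sender: a singleton MessageManager keeping one MessageContainer per chat plus a once-per-second processor loop (note that its process_chat_messages referenced self.storage, which __init__ never set, so the store_message call would have raised AttributeError). Its shared replacement in src/plugins/chat/message_sender.py is not shown in this commit; the sketch below only restates the per-stream-queue-plus-processor pattern in a self-contained form, with illustrative names:

import asyncio
import time
from dataclasses import dataclass, field
from typing import Dict, List

@dataclass
class Pending:
    text: str
    thinking_start_time: float = field(default_factory=time.time)

class StreamQueues:
    def __init__(self):
        self.containers: Dict[str, List[Pending]] = {}
        self.running = True

    def get_container(self, chat_id: str) -> List[Pending]:
        return self.containers.setdefault(chat_id, [])

    async def process_once(self, send) -> None:
        # one pass over every stream, oldest queued message first; 'send' is any coroutine(msg)
        for chat_id in list(self.containers):      # copy the keys: containers may grow meanwhile
            container = self.get_container(chat_id)
            if container:
                await send(container.pop(0))

    async def start_processor(self, send) -> None:
        while self.running:                        # once per second, like the removed loop
            await asyncio.sleep(1)
            await self.process_once(send)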

View File

@@ -1,6 +1,5 @@
import random
from typing import Optional
from src.heart_flow.sub_heartflow import SubHeartflow
from ...config.config import global_config
from ..chat.chat_stream import chat_manager
from src.common.logger import get_module_logger
@@ -82,23 +81,20 @@ class PromptBuilder:
async def build_prompt(
self, build_mode,reason, message_txt: str, sender_name: str = "某人",subheartflow: SubHeartflow =None
self, build_mode,reason,current_mind_info, message_txt: str, sender_name: str = "某人",chat_stream=None
) -> tuple[str, str]:
chat_stream = chat_manager.get_stream(subheartflow.subheartflow_id)
if build_mode == "normal":
return await self._build_prompt_normal(chat_stream, message_txt, sender_name, subheartflow)
return await self._build_prompt_normal(chat_stream, message_txt, sender_name)
elif build_mode == "focus":
return await self._build_prompt_focus(reason, chat_stream, message_txt, sender_name, subheartflow)
return await self._build_prompt_focus(reason, current_mind_info, chat_stream, message_txt, sender_name)
async def _build_prompt_focus(
self, reason, chat_stream, message_txt: str, sender_name: str = "某人", subheartflow: SubHeartflow =None
self, reason, current_mind_info, chat_stream, message_txt: str, sender_name: str = "某人"
) -> tuple[str, str]:
current_mind_info = subheartflow.current_mind
individuality = Individuality.get_instance()
prompt_personality = individuality.get_prompt(type="personality", x_person=2, level=1)
@@ -107,7 +103,6 @@ class PromptBuilder:
# 日程构建
# schedule_prompt = f'''你现在正在做的事情是:{bot_schedule.get_current_num_task(num = 1,time_info = False)}'''
chat_stream = chat_manager.get_stream(subheartflow.subheartflow_id)
if chat_stream.group_info:
chat_in_group = True
else:
@@ -186,7 +181,7 @@ class PromptBuilder:
async def _build_prompt_normal(
self, chat_stream, message_txt: str, sender_name: str = "某人", subheartflow=None
self, chat_stream, message_txt: str, sender_name: str = "某人"
) -> tuple[str, str]:
# 开始构建prompt
prompt_personality = ""
@@ -209,7 +204,7 @@ class PromptBuilder:
(chat_stream.user_info.platform, chat_stream.user_info.user_id, chat_stream.user_info.user_nickname)
]
who_chat_in_group += get_recent_group_speaker(
subheartflow.subheartflow_id,
chat_stream.stream_id,
(chat_stream.user_info.platform, chat_stream.user_info.user_id),
limit=global_config.MAX_CONTEXT_SIZE,
)
@@ -249,7 +244,6 @@ class PromptBuilder:
# schedule_prompt = f"""你现在正在做的事情是:{bot_schedule.get_current_num_task(num=1, time_info=False)}"""
# 获取聊天上下文
chat_stream = chat_manager.get_stream(subheartflow.subheartflow_id)
if chat_stream.group_info:
chat_in_group = True
else:
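build_prompt now receives current_mind_info and an explicit chat_stream instead of a SubHeartflow, so the builder no longer resolves the stream via chat_manager.get_stream() itself. Since the parameter list has grown and both call sites in this commit already pass keywords, one option (an assumption, not something this commit does) would be to make the new arguments keyword-only so positional mix-ups cannot creep in:

async def build_prompt(
    self,
    build_mode,
    *,                        # keyword-only from here on; call sites stay explicit
    reason: str,
    current_mind_info: str,
    message_txt: str,
    sender_name: str = "某人",
    chat_stream=None,
) -> tuple[str, str]:
    if chat_stream is None:
        raise ValueError("chat_stream must now be passed explicitly to build_prompt")
    ...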

View File

@@ -9,7 +9,7 @@ from ...config.config import global_config
from ..chat.emoji_manager import emoji_manager
from .normal_chat_generator import ResponseGenerator
from ..chat.message import MessageSending, MessageRecv, MessageThinking, MessageSet
from ..chat.messagesender import message_manager
from ..chat.message_sender import message_manager
from ..storage.storage import MessageStorage
from ..chat.utils import is_mentioned_bot_in_message
from ..chat.utils_image import image_path_to_base64
@@ -96,18 +96,18 @@ class NormalChat:
@staticmethod
async def _send_response_messages(message, chat, response_set: List[str], thinking_id) -> MessageSending:
"""发送回复消息"""
container = message_manager.get_container(chat.stream_id)
container = await message_manager.get_container(chat.stream_id)
thinking_message = None
for msg in container.messages:
for msg in container.messages[:]:
if isinstance(msg, MessageThinking) and msg.message_info.message_id == thinking_id:
thinking_message = msg
container.messages.remove(msg)
break
if not thinking_message:
logger.warning("未找到对应的思考消息,可能已超时被移除")
return
logger.warning(f"[{chat.stream_id}] 未找到对应的思考消息 {thinking_id},可能已超时被移除")
return None
thinking_start_time = thinking_message.thinking_start_time
message_set = MessageSet(chat, thinking_id)
@@ -130,12 +130,14 @@ class NormalChat:
is_head=not mark_head,
is_emoji=False,
thinking_start_time=thinking_start_time,
apply_set_reply_logic=True
)
if not mark_head:
mark_head = True
first_bot_msg = bot_message
message_set.add_message(bot_message)
message_manager.add_message(message_set)
await message_manager.add_message(message_set)
return first_bot_msg
@@ -164,8 +166,9 @@ class NormalChat:
reply=message,
is_head=False,
is_emoji=True,
apply_set_reply_logic=True
)
message_manager.add_message(bot_message)
await message_manager.add_message(bot_message)
async def _update_relationship(self, message: MessageRecv, response_set):
"""更新关系情绪"""
@@ -328,12 +331,13 @@ class NormalChat:
if not response_set:
logger.info(f"[{chat.stream_id}] 模型未生成回复内容")
# 如果模型未生成回复,移除思考消息
container = message_manager.get_container(chat.stream_id)
container = await message_manager.get_container(chat.stream_id)
# thinking_message = None
for msg in container.messages[:]: # Iterate over a copy
if isinstance(msg, MessageThinking) and msg.message_info.message_id == thinking_id:
# thinking_message = msg
container.messages.remove(msg)
# container.remove_message(msg) # 直接移除
logger.debug(f"[{chat.stream_id}] 已移除未产生回复的思考消息 {thinking_id}")
break
return # 不发送回复
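The change from `for msg in container.messages:` to `for msg in container.messages[:]` in both hunks above matters because the loop removes items from the same list it walks; iterating over a snapshot copy avoids silently skipping elements. A minimal illustration:

msgs = ["thinking-1", "thinking-2", "keep"]
for m in msgs[:]:            # iterate over a copy
    if m.startswith("thinking"):
        msgs.remove(m)       # mutating the original list is now safe
assert msgs == ["keep"]      # without the copy, "thinking-2" would have been skipped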

View File

@@ -86,9 +86,10 @@ class ResponseGenerator:
prompt = await prompt_builder.build_prompt(
build_mode="normal",
reason= "",
current_mind_info="",
message_txt=message.processed_plain_text,
sender_name=sender_name,
subheartflow=sub_hf,
chat_stream=message.chat_stream,
)
logger.info(f"构建prompt时间: {t_build_prompt.human_readable}")