SengokuCola
2025-05-01 22:50:29 +08:00
parent c4a7b842f6
commit 2669572b30
9 changed files with 220 additions and 188 deletions

View File

@@ -20,7 +20,7 @@ NORMAL_CHAT_TIMEOUT_CHECK_INTERVAL_SECONDS = 60
# 新增状态评估间隔
HF_JUDGE_STATE_UPDATE_INTERVAL_SECONDS = 60
# 新增私聊激活检查间隔
PRIVATE_CHAT_ACTIVATION_CHECK_INTERVAL_SECONDS = 5  # 与兴趣评估类似设为5秒
CLEANUP_INTERVAL_SECONDS = 1200
STATE_UPDATE_INTERVAL_SECONDS = 60

@@ -76,7 +76,7 @@ class BackgroundTaskManager:
        self._normal_chat_timeout_check_task: Optional[asyncio.Task] = None
        self._hf_judge_state_update_task: Optional[asyncio.Task] = None
        self._into_focus_task: Optional[asyncio.Task] = None
        self._private_chat_activation_task: Optional[asyncio.Task] = None  # 新增私聊激活任务引用
        self._tasks: List[Optional[asyncio.Task]] = []  # Keep track of all tasks

    async def start_tasks(self):

@@ -129,9 +129,9 @@ class BackgroundTaskManager:
            ),
            # 新增私聊激活任务配置
            (
                # Use lambda to pass the interval to the runner function
                lambda: self._run_private_chat_activation_cycle(PRIVATE_CHAT_ACTIVATION_CHECK_INTERVAL_SECONDS),
                "debug",
                f"私聊激活检查任务已启动 间隔:{PRIVATE_CHAT_ACTIVATION_CHECK_INTERVAL_SECONDS}s",
                "_private_chat_activation_task",
            ),

@@ -294,5 +294,5 @@ class BackgroundTaskManager:
        await _run_periodic_loop(
            task_name="Private Chat Activation Check",
            interval=interval,
-           task_func=self.subheartflow_manager.sbhf_absent_private_into_focus
+           task_func=self.subheartflow_manager.sbhf_absent_private_into_focus,
        )
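Note: the new private-chat activation entry follows the same registration pattern as the other background tasks above: a factory (here a lambda that binds the check interval), a log level, a startup message, and the attribute name that will hold the created asyncio.Task. The `_run_periodic_loop` helper itself is not part of this diff; the following is only a minimal sketch of how such a loop is typically written (names and error handling here are assumptions, not the project's actual implementation):

    import asyncio
    import logging

    logger = logging.getLogger(__name__)


    async def _run_periodic_loop(task_name: str, interval: float, task_func) -> None:
        """Repeatedly await task_func, sleeping `interval` seconds between runs."""
        while True:
            try:
                await task_func()
            except asyncio.CancelledError:
                logger.debug(f"{task_name} cancelled, exiting loop")
                raise
            except Exception as e:  # keep the loop alive even if one run fails
                logger.error(f"{task_name} failed: {e}")
            await asyncio.sleep(interval)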

View File

@@ -13,11 +13,10 @@ from src.plugins.utils.chat_message_builder import (
    get_person_id_list,
)
from src.plugins.utils.prompt_builder import Prompt, global_prompt_manager
-from src.plugins.chat.chat_stream import chat_manager
from typing import Optional
-from src.plugins.person_info.person_info import person_info_manager

# Import the new utility function
from .utils_chat import get_chat_type_and_target_info

logger = get_logger("observation")

@@ -26,14 +25,14 @@ Prompt(
    """这是qq群聊的聊天记录请总结以下聊天记录的主题
{chat_logs}
请用一句话概括,包括人物、事件和主要信息,不要分点。""",
-   "chat_summary_group_prompt"  # Template for group chat
+   "chat_summary_group_prompt",  # Template for group chat
)
Prompt(
    """这是你和{chat_target}的私聊记录，请总结以下聊天记录的主题：
{chat_logs}
请用一句话概括,包括事件,时间,和主要信息,不要分点。""",
-   "chat_summary_private_prompt"  # Template for private chat
+   "chat_summary_private_prompt",  # Template for private chat
)
# --- End Prompt Template Definition ---

@@ -56,9 +55,9 @@ class ChattingObservation(Observation):
        super().__init__("chat", chat_id)
        self.chat_id = chat_id
        # --- Initialize attributes (defaults) ---
        self.is_group_chat: bool = False
        self.chat_target_info: Optional[dict] = None
        # --- End Initialization ---

        # --- Other attributes initialized in __init__ ---

@@ -77,11 +76,12 @@ class ChattingObservation(Observation):
            model=global_config.llm_observation, temperature=0.7, max_tokens=300, request_type="chat_observation"
        )

    async def initialize(self):
        # --- Use utility function to determine chat type and fetch info ---
        self.is_group_chat, self.chat_target_info = await get_chat_type_and_target_info(self.chat_id)
-       logger.debug(f"ChattingObservation {self.chat_id} initialized: is_group={self.is_group_chat}, target_info={self.chat_target_info}")
+       logger.debug(
+           f"ChattingObservation {self.chat_id} initialized: is_group={self.is_group_chat}, target_info={self.chat_target_info}"
+       )
        # --- End using utility function ---
        # Fetch initial messages (existing logic)

@@ -141,30 +141,33 @@ class ChattingObservation(Observation):
        )
        # --- Build prompt using template ---
        prompt = None  # Initialize prompt as None
        try:
            # 构建 Prompt - 根据 is_group_chat 选择模板
            if self.is_group_chat:
                prompt_template_name = "chat_summary_group_prompt"
                prompt = await global_prompt_manager.format_prompt(
-                   prompt_template_name,
-                   chat_logs=oldest_messages_str
+                   prompt_template_name, chat_logs=oldest_messages_str
                )
            else:
                # For private chat, add chat_target to the prompt variables
                prompt_template_name = "chat_summary_private_prompt"
                # Determine the target name for the prompt
                chat_target_name = "对方"  # Default fallback
                if self.chat_target_info:
                    # Prioritize person_name, then nickname
-                   chat_target_name = self.chat_target_info.get('person_name') or self.chat_target_info.get('user_nickname') or chat_target_name
+                   chat_target_name = (
+                       self.chat_target_info.get("person_name")
+                       or self.chat_target_info.get("user_nickname")
+                       or chat_target_name
+                   )
                # Format the private chat prompt
                prompt = await global_prompt_manager.format_prompt(
                    prompt_template_name,
                    # Assuming the private prompt template uses {chat_target}
                    chat_target=chat_target_name,
-                   chat_logs=oldest_messages_str
+                   chat_logs=oldest_messages_str,
                )
        except Exception as e:
            logger.error(f"构建总结 Prompt 失败 for chat {self.chat_id}: {e}")

@@ -172,7 +175,7 @@ class ChattingObservation(Observation):
        summary = "没有主题的闲聊"  # 默认值
        if prompt:  # Check if prompt was built successfully
            try:
                summary_result, _, _ = await self.llm_summary.generate_response(prompt)
                if summary_result:  # 确保结果不为空

@@ -183,7 +186,6 @@ class ChattingObservation(Observation):
        else:
            logger.warning(f"因 Prompt 构建失败，跳过 LLM 总结 for chat {self.chat_id}")
        mid_memory = {
            "id": str(int(datetime.now().timestamp())),
            "theme": summary,

View File

@@ -13,7 +13,6 @@ from src.plugins.heartFC_chat.normal_chat import NormalChat
from src.heart_flow.mai_state_manager import MaiStateInfo
from src.heart_flow.chat_state_info import ChatState, ChatStateInfo
from src.heart_flow.sub_mind import SubMind
-from src.plugins.person_info.person_info import person_info_manager
from .utils_chat import get_chat_type_and_target_info

@@ -240,9 +239,9 @@ class SubHeartflow:
        self.chat_state_last_time: float = 0
        self.history_chat_state: List[Tuple[ChatState, float]] = []
        # --- Initialize attributes ---
        self.is_group_chat: bool = False
        self.chat_target_info: Optional[dict] = None
        # --- End Initialization ---

        # 兴趣检测器

@@ -268,16 +267,20 @@ class SubHeartflow:
        )
        # 日志前缀 - Moved determination to initialize
        self.log_prefix = str(subheartflow_id)  # Initial default prefix

    async def initialize(self):
        """异步初始化方法，创建兴趣流并确定聊天类型"""
        # --- Use utility function to determine chat type and fetch info ---
        self.is_group_chat, self.chat_target_info = await get_chat_type_and_target_info(self.chat_id)
        # Update log prefix after getting info (potential stream name)
-       self.log_prefix = chat_manager.get_stream_name(self.subheartflow_id) or self.subheartflow_id  # Keep this line or adjust if utils provides name
-       logger.debug(f"SubHeartflow {self.chat_id} initialized: is_group={self.is_group_chat}, target_info={self.chat_target_info}")
+       self.log_prefix = (
+           chat_manager.get_stream_name(self.subheartflow_id) or self.subheartflow_id
+       )  # Keep this line or adjust if utils provides name
+       logger.debug(
+           f"SubHeartflow {self.chat_id} initialized: is_group={self.is_group_chat}, target_info={self.chat_target_info}"
+       )
        # --- End using utility function ---
        # Initialize interest system (existing logic)

@@ -315,15 +318,15 @@ class SubHeartflow:
            if not chat_stream:
                logger.error(f"{log_prefix} 无法获取 chat_stream无法启动 NormalChat。")
                return False

            self.normal_chat_instance = NormalChat(chat_stream=chat_stream, interest_dict=self.get_interest_dict())
            # 进行异步初始化
            await self.normal_chat_instance.initialize()
            # 启动聊天任务
            logger.info(f"{log_prefix} 开始普通聊天，随便水群...")
            await self.normal_chat_instance.start_chat()  # start_chat now ensures init is called again if needed
            return True
        except Exception as e:
            logger.error(f"{log_prefix} 启动 NormalChat 或其初始化时出错: {e}")

View File

@@ -20,7 +20,7 @@ logger = get_logger("sub_heartflow")
def init_prompt():
    # --- Group Chat Prompt ---
    group_prompt = """
{extra_info}
{relation_prompt}

@@ -45,7 +45,7 @@ def init_prompt():
3. 如需处理消息或回复，请使用工具。"""
    Prompt(group_prompt, "sub_heartflow_prompt_before")

    # --- Private Chat Prompt ---
    private_prompt = """
{extra_info}
{relation_prompt}

@@ -69,9 +69,9 @@ def init_prompt():
1. 输出想法后考虑是否需要使用工具
2. 工具可获取信息或执行操作
3. 如需处理消息或回复，请使用工具。"""
    Prompt(private_prompt, "sub_heartflow_prompt_private_before")  # New template name

    # --- Last Loop Prompt (remains the same) ---
    last_loop_t = """
刚刚你的内心想法是：{current_thinking_info}
{if_replan_prompt}

@@ -152,17 +152,19 @@ class SubMind:
        # 获取观察对象
        observation = self.observations[0] if self.observations else None
-       if not observation or not hasattr(observation, 'is_group_chat'):  # Ensure it's ChattingObservation or similar
+       if not observation or not hasattr(observation, "is_group_chat"):  # Ensure it's ChattingObservation or similar
            logger.error(f"{self.log_prefix} 无法获取有效的观察对象或缺少聊天类型信息")
            self.update_current_mind("(观察出错了...)")
            return self.current_mind, self.past_mind

        is_group_chat = observation.is_group_chat
        chat_target_info = observation.chat_target_info
        chat_target_name = "对方"  # Default for private
        if not is_group_chat and chat_target_info:
-           chat_target_name = chat_target_info.get('person_name') or chat_target_info.get('user_nickname') or chat_target_name
+           chat_target_name = (
+               chat_target_info.get("person_name") or chat_target_info.get("user_nickname") or chat_target_name
+           )
        # --- End getting observation info ---

        # 获取观察内容
        chat_observe_info = observation.get_observe_info()

@@ -274,7 +276,7 @@ class SubMind:
        )[0]

        # ---------- 4. 构建最终提示词 ----------
        # --- Choose template based on chat type ---
        if is_group_chat:
            template_name = "sub_heartflow_prompt_before"
            prompt = (await global_prompt_manager.get_prompt_async(template_name)).format(

@@ -290,22 +292,22 @@ class SubMind:
                cycle_info_block=cycle_info_block,
                # chat_target_name is not used in group prompt
            )
        else:  # Private chat
            template_name = "sub_heartflow_prompt_private_before"
            prompt = (await global_prompt_manager.get_prompt_async(template_name)).format(
                extra_info="",
                prompt_personality=prompt_personality,
                relation_prompt=relation_prompt,  # Might need adjustment for private context
                bot_name=individuality.name,
                time_now=time_now,
                chat_target_name=chat_target_name,  # Pass target name
                chat_observe_info=chat_observe_info,
                mood_info=mood_info,
                hf_do_next=hf_do_next,
                last_loop_prompt=last_loop_prompt,
                cycle_info_block=cycle_info_block,
            )
        # --- End choosing template ---

        # ---------- 5. 执行LLM请求并处理响应 ----------
        content = ""  # 初始化内容变量

View File

@@ -345,7 +345,8 @@ class SubHeartflowManager:
        async with self._lock:
            # 1. 筛选出所有 ABSENT 状态的 *群聊* 子心流
            absent_group_subflows = [
-               hf for hf in self.subheartflows.values()
+               hf
+               for hf in self.subheartflows.values()
                if hf.chat_state.chat_status == ChatState.ABSENT and hf.is_group_chat
            ]

@@ -358,7 +359,7 @@ class SubHeartflowManager:
            flow_id = sub_hf_to_evaluate.subheartflow_id
            stream_name = chat_manager.get_stream_name(flow_id) or flow_id
            log_prefix = f"[{stream_name}]"

            # --- Private chat check (redundant due to filter above, but safe) ---
            # if not sub_hf_to_evaluate.is_group_chat:
            #     logger.debug(f"{log_prefix} 是私聊，跳过 CHAT 状态评估。")

@@ -684,24 +685,26 @@ class SubHeartflowManager:
            current_state = subflow.chat_state.chat_status
            if current_state == ChatState.FOCUSED:
                target_state = ChatState.ABSENT  # Default target
                log_reason = "默认转换 (私聊或群聊)"

                # --- Modify logic based on chat type --- #
                if subflow.is_group_chat:
                    # Group chat: Decide between ABSENT or CHAT
                    if random.random() < 0.5:  # 50% chance to try CHAT
                        current_mai_state = self.mai_state_info.get_current_state()
                        chat_limit = current_mai_state.get_normal_chat_max_num()
                        current_chat_count = self.count_subflows_by_state_nolock(ChatState.CHAT)
                        if current_chat_count < chat_limit:
                            target_state = ChatState.CHAT
                            log_reason = f"群聊随机选择 CHAT (当前 {current_chat_count}/{chat_limit})"
                        else:
                            target_state = ChatState.ABSENT  # Fallback to ABSENT if CHAT limit reached
-                           log_reason = f"群聊随机选择 CHAT 但已达上限 ({current_chat_count}/{chat_limit})，转为 ABSENT"
+                           log_reason = (
+                               f"群聊随机选择 CHAT 但已达上限 ({current_chat_count}/{chat_limit})，转为 ABSENT"
+                           )
                    else:  # 50% chance to go directly to ABSENT
                        target_state = ChatState.ABSENT
                        log_reason = "群聊随机选择 ABSENT"
                else:

@@ -732,6 +735,7 @@ class SubHeartflowManager:
            logger.warning(
                f"[状态转换请求] 收到对 {stream_name} 的请求，但其状态为 {current_state.value} (非 FOCUSED)，不执行转换"
            )
        # --- 结束新增 --- #

    # --- 新增：处理私聊从 ABSENT 直接到 FOCUSED 的逻辑 --- #

@@ -740,17 +744,17 @@ class SubHeartflowManager:
        log_prefix_task = "[私聊激活检查]"
        transitioned_count = 0
        checked_count = 0

        # --- 获取当前状态和 FOCUSED 上限 --- #
        current_mai_state = self.mai_state_info.get_current_state()
        focused_limit = current_mai_state.get_focused_chat_max_num()

        # --- 检查是否允许 FOCUS 模式 --- #
        if not global_config.allow_focus_mode:
            # Log less frequently to avoid spam
            # if int(time.time()) % 60 == 0:
            #     logger.debug(f"{log_prefix_task} 配置不允许进入 FOCUSED 状态")
            return

        if focused_limit <= 0:
            # logger.debug(f"{log_prefix_task} 当前状态 ({current_mai_state.value}) 不允许 FOCUSED 子心流")

@@ -759,10 +763,11 @@ class SubHeartflowManager:
        async with self._lock:
            # --- 获取当前 FOCUSED 计数 (不上锁版本) --- #
            current_focused_count = self.count_subflows_by_state_nolock(ChatState.FOCUSED)

            # --- 筛选出所有 ABSENT 状态的私聊子心流 --- #
            eligible_subflows = [
-               hf for hf in self.subheartflows.values()
+               hf
+               for hf in self.subheartflows.values()
                if hf.chat_state.chat_status == ChatState.ABSENT and not hf.is_group_chat
            ]
            checked_count = len(eligible_subflows)

@@ -775,8 +780,10 @@ class SubHeartflowManager:
            for sub_hf in eligible_subflows:
                # --- 再次检查 FOCUSED 上限，因为可能有多个同时激活 --- #
                if current_focused_count >= focused_limit:
-                   logger.debug(f"{log_prefix_task} 已达专注上限 ({current_focused_count}/{focused_limit})，停止检查后续私聊。")
-                   break  # 已满，无需再检查其他私聊
+                   logger.debug(
+                       f"{log_prefix_task} 已达专注上限 ({current_focused_count}/{focused_limit})，停止检查后续私聊。"
+                   )
+                   break  # 已满，无需再检查其他私聊

                flow_id = sub_hf.subheartflow_id
                stream_name = chat_manager.get_stream_name(flow_id) or flow_id

@@ -784,45 +791,51 @@ class SubHeartflowManager:
                try:
                    # --- 检查是否有新活动 --- #
                    observation = sub_hf._get_primary_observation()  # 获取主要观察者
                    is_active = False
                    if observation:
                        # 检查自上次状态变为 ABSENT 后是否有新消息
                        # 使用 chat_state_changed_time 可能更精确
                        # 加一点点缓冲时间(例如 1 秒)以防时间戳完全相等
                        timestamp_to_check = sub_hf.chat_state_changed_time - 1
                        has_new = await observation.has_new_messages_since(timestamp_to_check)
                        if has_new:
                            is_active = True
                            logger.debug(f"{log_prefix} 检测到新消息，标记为活跃。")
                        # 可选检查兴趣度是否大于0 (如果需要)
                        # interest_level = await sub_hf.interest_chatting.get_interest()
                        # if interest_level > 0:
                        #     is_active = True
                        #     logger.debug(f"{log_prefix} 检测到兴趣度 > 0 ({interest_level:.2f})，标记为活跃。")
                    else:
                        logger.warning(f"{log_prefix} 无法获取主要观察者来检查活动状态。")

                    # --- 如果活跃且未达上限，则尝试转换 --- #
                    if is_active:
-                       logger.info(f"{log_prefix} 检测到活跃且未达专注上限 ({current_focused_count}/{focused_limit})，尝试转换为 FOCUSED。")
+                       logger.info(
+                           f"{log_prefix} 检测到活跃且未达专注上限 ({current_focused_count}/{focused_limit})，尝试转换为 FOCUSED。"
+                       )
                        await sub_hf.change_chat_state(ChatState.FOCUSED)
                        # 确认转换成功
                        if sub_hf.chat_state.chat_status == ChatState.FOCUSED:
                            transitioned_count += 1
                            current_focused_count += 1  # 更新计数器以供本轮后续检查
                            logger.info(f"{log_prefix} 成功进入 FOCUSED 状态。")
                        else:
-                           logger.warning(f"{log_prefix} 尝试进入 FOCUSED 状态失败。当前状态: {sub_hf.chat_state.chat_status.value}")
+                           logger.warning(
+                               f"{log_prefix} 尝试进入 FOCUSED 状态失败。当前状态: {sub_hf.chat_state.chat_status.value}"
+                           )
                    # else:  # 不活跃，无需操作
                    #     logger.debug(f"{log_prefix} 未检测到新活动，保持 ABSENT。")
                except Exception as e:
                    logger.error(f"{log_prefix} 检查私聊活动或转换状态时出错: {e}", exc_info=True)

            # --- 循环结束后记录总结日志 --- #
            if transitioned_count > 0:
-               logger.debug(f"{log_prefix_task} 完成，共检查 {checked_count} 个私聊，{transitioned_count} 个转换为 FOCUSED。")
+               logger.debug(
+                   f"{log_prefix_task} 完成，共检查 {checked_count} 个私聊，{transitioned_count} 个转换为 FOCUSED。"
+               )
        # --- 结束新增 --- #
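Note: the promotion rule added above boils down to "promote ABSENT private chats that have seen new messages since they went ABSENT, up to the FOCUSED cap". The following standalone sketch distills only that decision logic; the class and function names are hypothetical and do not appear in the repository:

    from dataclasses import dataclass
    from typing import List


    @dataclass
    class PrivateFlow:
        """Simplified stand-in for a private SubHeartflow (illustrative only)."""
        name: str
        has_new_messages: bool  # new activity since the flow went ABSENT


    def pick_flows_to_focus(absent_private_flows: List[PrivateFlow],
                            current_focused: int,
                            focused_limit: int) -> List[PrivateFlow]:
        """Return the flows to promote to FOCUSED, respecting the global cap."""
        promoted = []
        for flow in absent_private_flows:
            if current_focused >= focused_limit:
                break                  # cap reached, stop checking the rest
            if flow.has_new_messages:  # only active private chats get promoted
                promoted.append(flow)
                current_focused += 1   # count against the cap within this pass
        return promoted


    # Example: with a cap of 2 and one slot already used, only the first active flow fits.
    flows = [PrivateFlow("alice", True), PrivateFlow("bob", False), PrivateFlow("carol", True)]
    print([f.name for f in pick_flows_to_focus(flows, current_focused=1, focused_limit=2)])  # ['alice']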

View File

@@ -6,6 +6,7 @@ from src.plugins.person_info.person_info import person_info_manager
logger = get_logger("heartflow_utils")


async def get_chat_type_and_target_info(chat_id: str) -> Tuple[bool, Optional[Dict]]:
    """
    获取聊天类型（是否群聊）和私聊对象信息。

@@ -14,7 +15,7 @@ async def get_chat_type_and_target_info(chat_id: str) -> Tuple[bool, Optional[Di
        chat_id: 聊天流ID

    Returns:
        Tuple[bool, Optional[Dict]]:
            - bool: 是否为群聊 (True 是群聊, False 是私聊或未知)
            - Optional[Dict]: 如果是私聊，包含对方信息的字典；否则为 None。
              字典包含: platform, user_id, user_nickname, person_id, person_name

@@ -23,29 +24,29 @@ async def get_chat_type_and_target_info(chat_id: str) -> Tuple[bool, Optional[Di
    chat_target_info = None
    try:
        chat_stream = await asyncio.to_thread(chat_manager.get_stream, chat_id)  # Use to_thread if get_stream is sync
        # If get_stream is already async, just use: chat_stream = await chat_manager.get_stream(chat_id)
        if chat_stream:
            if chat_stream.group_info:
                is_group_chat = True
                chat_target_info = None  # Explicitly None for group chat
            elif chat_stream.user_info:  # It's a private chat
                is_group_chat = False
                user_info = chat_stream.user_info
                platform = chat_stream.platform
                user_id = user_info.user_id

                # Initialize target_info with basic info
                target_info = {
-                   'platform': platform,
-                   'user_id': user_id,
-                   'user_nickname': user_info.user_nickname,
-                   'person_id': None,
-                   'person_name': None
+                   "platform": platform,
+                   "user_id": user_id,
+                   "user_nickname": user_info.user_nickname,
+                   "person_id": None,
+                   "person_name": None,
                }

                # Try to fetch person info
                try:
                    # Assume get_person_id is sync (as per original code), keep using to_thread
                    person_id = await asyncio.to_thread(person_info_manager.get_person_id, platform, user_id)

@@ -54,18 +55,20 @@ async def get_chat_type_and_target_info(chat_id: str) -> Tuple[bool, Optional[Di
                        # get_value is async, so await it directly
                        person_name = await person_info_manager.get_value(person_id, "person_name")
-                       target_info['person_id'] = person_id
-                       target_info['person_name'] = person_name
+                       target_info["person_id"] = person_id
+                       target_info["person_name"] = person_name
                except Exception as person_e:
-                   logger.warning(f"获取 person_id 或 person_name 时出错 for {platform}:{user_id} in utils: {person_e}")
+                   logger.warning(
+                       f"获取 person_id 或 person_name 时出错 for {platform}:{user_id} in utils: {person_e}"
+                   )

                chat_target_info = target_info
        else:
            logger.warning(f"无法获取 chat_stream for {chat_id} in utils")
            # Keep defaults: is_group_chat=False, chat_target_info=None
    except Exception as e:
        logger.error(f"获取聊天类型和目标信息时出错 for {chat_id}: {e}", exc_info=True)
        # Keep defaults on error

    return is_group_chat, chat_target_info
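Note: several call sites in this commit consume the `(is_group_chat, chat_target_info)` pair in the same way: branch on the bool, then fall back from `person_name` to `user_nickname`. A minimal caller-side sketch, assuming it lives in a module that imports `get_chat_type_and_target_info` as above (the `describe_chat` helper and the chat_id argument are hypothetical):

    async def describe_chat(chat_id: str) -> str:
        is_group_chat, chat_target_info = await get_chat_type_and_target_info(chat_id)
        if is_group_chat:
            return "group chat"
        if chat_target_info:
            # Prefer the remembered person_name, fall back to the platform nickname.
            name = chat_target_info.get("person_name") or chat_target_info.get("user_nickname") or "对方"
            return f"private chat with {name}"
        return "unknown chat"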

View File

@@ -26,7 +26,6 @@ from .heartFC_sender import HeartFCSender
from src.plugins.chat.utils import process_llm_response
from src.plugins.respon_info_catcher.info_catcher import info_catcher_manager
from src.plugins.moods.moods import MoodManager
-from src.individuality.individuality import Individuality
from src.heart_flow.utils_chat import get_chat_type_and_target_info

@@ -197,9 +196,9 @@ class HeartFChatting:
        # 日志前缀
        self.log_prefix: str = str(chat_id)  # Initial default, will be updated
        # --- Initialize attributes (defaults) ---
        self.is_group_chat: bool = False
        self.chat_target_info: Optional[dict] = None
        # --- End Initialization ---

        # 动作管理器

@@ -244,26 +243,30 @@ class HeartFChatting:
        """
        if self._initialized:
            return True

        # --- Use utility function to determine chat type and fetch info ---
        # Note: get_chat_type_and_target_info handles getting the chat_stream internally
        self.is_group_chat, self.chat_target_info = await get_chat_type_and_target_info(self.stream_id)
        # Update log prefix based on potential stream name (if needed, or get it from chat_stream if util doesn't return it)
        # Assuming get_chat_type_and_target_info focuses only on type/target
        # We still need the chat_stream object itself for other operations
        try:
            self.chat_stream = await asyncio.to_thread(chat_manager.get_stream, self.stream_id)
            if not self.chat_stream:
-               logger.error(f"[HFC:{self.stream_id}] 获取ChatStream失败 during _initialize, though util func might have succeeded earlier.")
-               return False  # Cannot proceed without chat_stream object
+               logger.error(
+                   f"[HFC:{self.stream_id}] 获取ChatStream失败 during _initialize, though util func might have succeeded earlier."
+               )
+               return False  # Cannot proceed without chat_stream object
            # Update log prefix using the fetched stream object
            self.log_prefix = f"[{chat_manager.get_stream_name(self.stream_id) or self.stream_id}]"
        except Exception as e:
            logger.error(f"[HFC:{self.stream_id}] 获取ChatStream时出错 in _initialize: {e}")
            return False

-       logger.debug(f"{self.log_prefix} HeartFChatting initialized: is_group={self.is_group_chat}, target_info={self.chat_target_info}")
+       logger.debug(
+           f"{self.log_prefix} HeartFChatting initialized: is_group={self.is_group_chat}, target_info={self.chat_target_info}"
+       )
        # --- End using utility function ---

        self._initialized = True

@@ -853,13 +856,13 @@ class HeartFChatting:
        # --- 构建提示词 (调用修改后的 PromptBuilder 方法) ---
        prompt = await prompt_builder.build_planner_prompt(
            is_group_chat=self.is_group_chat,  # <-- Pass HFC state
            chat_target_info=self.chat_target_info,  # <-- Pass HFC state
            cycle_history=self._cycle_history,  # <-- Pass HFC state
            observed_messages_str=observed_messages_str,  # <-- Pass local variable
            current_mind=current_mind,  # <-- Pass argument
            structured_info=self.sub_mind.structured_info,  # <-- Pass SubMind info
-           current_available_actions=current_available_actions  # <-- Pass determined actions
+           current_available_actions=current_available_actions,  # <-- Pass determined actions
        )

        # --- 调用 LLM (普通文本生成) ---

@@ -1279,25 +1282,29 @@ class HeartFChatting:
        # 2. 获取信息捕捉器
        info_catcher = info_catcher_manager.get_info_catcher(thinking_id)

        # --- Determine sender_name for private chat ---
        sender_name_for_prompt = "某人"  # Default for group or if info unavailable
        if not self.is_group_chat and self.chat_target_info:
            # Prioritize person_name, then nickname
-           sender_name_for_prompt = self.chat_target_info.get('person_name') or self.chat_target_info.get('user_nickname') or sender_name_for_prompt
+           sender_name_for_prompt = (
+               self.chat_target_info.get("person_name")
+               or self.chat_target_info.get("user_nickname")
+               or sender_name_for_prompt
+           )
        # --- End determining sender_name ---

        # 3. 构建 Prompt
        with Timer("构建Prompt", {}):  # 内部计时器，可选保留
            prompt = await prompt_builder.build_prompt(
                build_mode="focus",
                chat_stream=self.chat_stream,  # Pass the stream object
                # Focus specific args:
                reason=reason,
                current_mind_info=self.sub_mind.current_mind,
                structured_info=self.sub_mind.structured_info,
                sender_name=sender_name_for_prompt,  # Pass determined name
                # Normal specific args (not used in focus mode):
                # message_txt="",
            )

        # 4. 调用 LLM 生成回复

@@ -1305,9 +1312,9 @@ class HeartFChatting:
        reasoning_content = None
        model_name = "unknown_model"
        if not prompt:
            logger.error(f"{self.log_prefix}[Replier-{thinking_id}] Prompt 构建失败，无法生成回复。")
            return None

        try:
            with Timer("LLM生成", {}):  # 内部计时器，可选保留
                content, reasoning_content, model_name = await self.model_normal.generate_response(prompt)

View File

@@ -138,7 +138,7 @@ JSON 结构如下，包含三个字段 "action", "reasoning", "emoji_query":
Prompt("你现在正在做的事情是：{schedule_info}", "schedule_prompt")
Prompt("\n你有以下这些**知识**\n{prompt_info}\n请你**记住上面的知识**，之后可能会用到。\n", "knowledge_prompt")

# --- Template for HeartFChatting (FOCUSED mode) ---
Prompt(
    """
{info_from_tools}

@@ -157,10 +157,10 @@ JSON 结构如下，包含三个字段 "action", "reasoning", "emoji_query":
回复尽量简短一些。请注意把握聊天内容，{reply_style2}{prompt_ger}
{reply_style1}，说中文，不要刻意突出自身学科背景，注意只输出回复内容。
{moderation_prompt}。注意：回复不要输出多余内容(包括前后缀冒号和引号括号表情包at或 @等 )。""",
    "heart_flow_private_prompt",  # New template for private FOCUSED chat
)

# --- Template for NormalChat (CHAT mode) ---
Prompt(
    """
{memory_prompt}

@@ -179,17 +179,17 @@ JSON 结构如下，包含三个字段 "action", "reasoning", "emoji_query":
请注意不要输出多余内容(包括前后缀，冒号和引号，括号等)，只输出回复内容。
{moderation_prompt}
不要输出多余内容(包括前后缀，冒号和引号，括号()表情包at或 @等 )。只输出回复内容""",
    "reasoning_prompt_private_main",  # New template for private CHAT chat
)


async def _build_prompt_focus(reason, current_mind_info, structured_info, chat_stream, sender_name) -> str:
    individuality = Individuality.get_instance()
    prompt_personality = individuality.get_prompt(x_person=0, level=2)

    # Determine if it's a group chat
    is_group_chat = bool(chat_stream.group_info)
    # Use sender_name passed from caller for private chat, otherwise use a default for group
    # Default sender_name for group chat isn't used in the group prompt template, but set for consistency
    effective_sender_name = sender_name if not is_group_chat else "某人"

@@ -243,21 +243,21 @@ async def _build_prompt_focus(reason, current_mind_info, structured_info, chat_s
    logger.debug("开始构建 focus prompt")

    # --- Choose template based on chat type ---
    if is_group_chat:
        template_name = "heart_flow_prompt"
        # Group specific formatting variables (already fetched or default)
        chat_target_1 = await global_prompt_manager.get_prompt_async("chat_target_group1")
        chat_target_2 = await global_prompt_manager.get_prompt_async("chat_target_group2")
        prompt = await global_prompt_manager.format_prompt(
            template_name,
            info_from_tools=structured_info_prompt,
            chat_target=chat_target_1,  # Used in group template
            chat_talking_prompt=chat_talking_prompt,
            bot_name=global_config.BOT_NICKNAME,
            prompt_personality=prompt_personality,
            chat_target_2=chat_target_2,  # Used in group template
            current_mind_info=current_mind_info,
            reply_style2=reply_style2_chosen,
            reply_style1=reply_style1_chosen,

@@ -266,12 +266,12 @@ async def _build_prompt_focus(reason, current_mind_info, structured_info, chat_s
            moderation_prompt=await global_prompt_manager.get_prompt_async("moderation_prompt"),
            # sender_name is not used in the group template
        )
    else:  # Private chat
        template_name = "heart_flow_private_prompt"
        prompt = await global_prompt_manager.format_prompt(
            template_name,
            info_from_tools=structured_info_prompt,
            sender_name=effective_sender_name,  # Used in private template
            chat_talking_prompt=chat_talking_prompt,
            bot_name=global_config.BOT_NICKNAME,
            prompt_personality=prompt_personality,

@@ -283,7 +283,7 @@ async def _build_prompt_focus(reason, current_mind_info, structured_info, chat_s
            prompt_ger=prompt_ger,
            moderation_prompt=await global_prompt_manager.get_prompt_async("moderation_prompt"),
        )
    # --- End choosing template ---

    logger.debug(f"focus_chat_prompt (is_group={is_group_chat}): \n{prompt}")
    return prompt

@@ -302,10 +302,8 @@ class PromptBuilder:
        current_mind_info=None,
        structured_info=None,
        message_txt=None,
-       sender_name = "某人",
+       sender_name="某人",
    ) -> Optional[str]:
-       is_group_chat = bool(chat_stream.group_info)
        if build_mode == "normal":
            return await self._build_prompt_normal(chat_stream, message_txt, sender_name)

@@ -326,20 +324,22 @@ class PromptBuilder:
        who_chat_in_group = []
        if is_group_chat:
            who_chat_in_group = get_recent_group_speaker(
                chat_stream.stream_id,
                (chat_stream.user_info.platform, chat_stream.user_info.user_id) if chat_stream.user_info else None,
                limit=global_config.observation_context_size,
            )
        elif chat_stream.user_info:
-           who_chat_in_group.append((chat_stream.user_info.platform, chat_stream.user_info.user_id, chat_stream.user_info.user_nickname))
+           who_chat_in_group.append(
+               (chat_stream.user_info.platform, chat_stream.user_info.user_id, chat_stream.user_info.user_nickname)
+           )

        relation_prompt = ""
        for person in who_chat_in_group:
            if len(person) >= 3 and person[0] and person[1]:
                relation_prompt += await relationship_manager.build_relationship_info(person)
            else:
                logger.warning(f"Invalid person tuple encountered for relationship prompt: {person}")

        mood_manager = MoodManager.get_instance()
        mood_prompt = mood_manager.get_prompt()

@@ -425,8 +425,6 @@ class PromptBuilder:
        end_time = time.time()
        logger.debug(f"知识检索耗时: {(end_time - start_time):.3f}")

        if global_config.ENABLE_SCHEDULE_GEN:
            schedule_prompt = await global_prompt_manager.format_prompt(
                "schedule_prompt", schedule_info=bot_schedule.get_current_num_task(num=1, time_info=False)

@@ -435,14 +433,14 @@ class PromptBuilder:
            schedule_prompt = ""

        logger.debug("开始构建 normal prompt")
        # --- Choose template and format based on chat type ---
        if is_group_chat:
            template_name = "reasoning_prompt_main"
            effective_sender_name = sender_name
            chat_target_1 = await global_prompt_manager.get_prompt_async("chat_target_group1")
            chat_target_2 = await global_prompt_manager.get_prompt_async("chat_target_group2")
            prompt = await global_prompt_manager.format_prompt(
                template_name,
                relation_prompt=relation_prompt,

@@ -466,8 +464,8 @@ class PromptBuilder:
            )
        else:
            template_name = "reasoning_prompt_private_main"
            effective_sender_name = sender_name
            prompt = await global_prompt_manager.format_prompt(
                template_name,
                relation_prompt=relation_prompt,

@@ -487,7 +485,7 @@ class PromptBuilder:
                prompt_ger=prompt_ger,
                moderation_prompt=await global_prompt_manager.get_prompt_async("moderation_prompt"),
            )
        # --- End choosing template ---

        return prompt

@@ -749,9 +747,9 @@ class PromptBuilder:
    async def build_planner_prompt(
        self,
        is_group_chat: bool,  # Now passed as argument
        chat_target_info: Optional[dict],  # Now passed as argument
        cycle_history: Deque["CycleInfo"],  # Now passed as argument (Type hint needs import or string)
        observed_messages_str: str,
        current_mind: Optional[str],
        structured_info: Dict[str, Any],

@@ -760,20 +758,22 @@ class PromptBuilder:
    ) -> str:
        """构建 Planner LLM 的提示词 (获取模板并填充数据)"""
        try:
            # --- Determine chat context ---
            chat_context_description = "你现在正在一个群聊中"
            chat_target_name = None  # Only relevant for private
            if not is_group_chat and chat_target_info:
-               chat_target_name = chat_target_info.get('person_name') or chat_target_info.get('user_nickname') or "对方"
+               chat_target_name = (
+                   chat_target_info.get("person_name") or chat_target_info.get("user_nickname") or "对方"
+               )
                chat_context_description = f"你正在和 {chat_target_name} 私聊"
            # --- End determining chat context ---

            # ... (Copy logic from HeartFChatting._build_planner_prompt here) ...
            # Structured info block
            structured_info_block = ""
            if structured_info:
                structured_info_block = f"以下是一些额外的信息:\n{structured_info}\n"

            # Chat content block
            chat_content_block = ""
            if observed_messages_str:

@@ -784,14 +784,14 @@ class PromptBuilder:
---"""
            else:
                chat_content_block = "当前没有观察到新的聊天内容。\\n"

            # Current mind block
            current_mind_block = ""
            if current_mind:
                current_mind_block = f"你的内心想法:\n{current_mind}"
            else:
                current_mind_block = "你的内心想法:\n[没有特别的想法]"

            # Cycle info block (using passed cycle_history)
            cycle_info_block = ""
            recent_active_cycles = []

View File

@@ -33,14 +33,14 @@ class NormalChat:
        self.chat_stream = chat_stream
        self.stream_id = chat_stream.stream_id
        # Get initial stream name, might be updated in initialize
        self.stream_name = chat_manager.get_stream_name(self.stream_id) or self.stream_id

        # Interest dict
        self.interest_dict = interest_dict

        # --- Initialize attributes (defaults) ---
        self.is_group_chat: bool = False
        self.chat_target_info: Optional[dict] = None
        # --- End Initialization ---

        # Other sync initializations

@@ -49,21 +49,23 @@ class NormalChat:
        self.start_time = time.time()
        self.last_speak_time = 0
        self._chat_task: Optional[asyncio.Task] = None
        self._initialized = False  # Track initialization status

        # logger.info(f"[{self.stream_name}] NormalChat 实例 __init__ 完成 (同步部分)。")
        # Avoid logging here as stream_name might not be final

    async def initialize(self):
        """异步初始化，获取聊天类型和目标信息。"""
        if self._initialized:
            return

        # --- Use utility function to determine chat type and fetch info ---
        self.is_group_chat, self.chat_target_info = await get_chat_type_and_target_info(self.stream_id)
        # Update stream_name again after potential async call in util func
        self.stream_name = chat_manager.get_stream_name(self.stream_id) or self.stream_id
-       logger.debug(f"[{self.stream_name}] NormalChat initialized: is_group={self.is_group_chat}, target_info={self.chat_target_info}")
+       logger.debug(
+           f"[{self.stream_name}] NormalChat initialized: is_group={self.is_group_chat}, target_info={self.chat_target_info}"
+       )
        # --- End using utility function ---

        self._initialized = True
        logger.info(f"[{self.stream_name}] NormalChat 实例 initialize 完成 (异步部分)。")

@@ -437,8 +439,8 @@ class NormalChat:
    async def start_chat(self):
        """先进行异步初始化，然后启动聊天任务。"""
        if not self._initialized:
            await self.initialize()  # Ensure initialized before starting tasks

        if self._chat_task is None or self._chat_task.done():
            logger.info(f"[{self.stream_name}] 开始后台处理初始兴趣消息和轮询任务...")
            # Process initial messages first