🤖 Auto-format code [skip ci]

This commit is contained in:
github-actions[bot]
2025-06-16 14:10:49 +00:00
parent f6215cd560
commit ee005456ea
16 changed files with 461 additions and 383 deletions
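
Every hunk below is a mechanical reformatting pass in the black/ruff-format style (the commit message does not name the tool, so take that as an assumption): over-long dict literals and call expressions are exploded one element per line with a trailing comma, single-quoted strings become double-quoted, long f-string log calls are wrapped, and a few unused names and imports are dropped. A minimal before/after sketch of the two recurring rewrites, using a hypothetical loop_info dict:

import time

# Before auto-formatting: one long line, single-quoted strings
loop_info = {'action_taken': False, 'reply_text': '', 'command': '', 'taken_time': time.time()}

# After auto-formatting: double quotes, one key per line, trailing comma
loop_info = {
    "action_taken": False,
    "reply_text": "",
    "command": "",
    "taken_time": time.time(),
}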

View File

@@ -341,7 +341,12 @@ class HeartFChatting:
},
"observed_messages": "",
},
"loop_action_info": {"action_taken": False, "reply_text": "", "command": "", "taken_time": time.time()},
"loop_action_info": {
"action_taken": False,
"reply_text": "",
"command": "",
"taken_time": time.time(),
},
}
self._current_cycle_detail.set_loop_info(error_loop_info)
self._current_cycle_detail.complete_cycle()
@@ -420,7 +425,12 @@ class HeartFChatting:
},
"observed_messages": "",
},
"loop_action_info": {"action_taken": False, "reply_text": "", "command": "", "taken_time": time.time()},
"loop_action_info": {
"action_taken": False,
"reply_text": "",
"command": "",
"taken_time": time.time(),
},
}
try:
self._current_cycle_detail.set_loop_info(error_loop_info)

View File

@@ -1,4 +1,3 @@
from reportportal_client import current
from src.chat.heart_flow.observation.chatting_observation import ChattingObservation
from src.chat.heart_flow.observation.observation import Observation
from src.llm_models.utils_model import LLMRequest
@@ -18,7 +17,12 @@ from json_repair import repair_json
from src.person_info.person_info import get_person_info_manager
import json
import asyncio
from src.chat.utils.chat_message_builder import get_raw_msg_by_timestamp_with_chat, get_raw_msg_by_timestamp_with_chat_inclusive, get_raw_msg_before_timestamp_with_chat, num_new_messages_since
from src.chat.utils.chat_message_builder import (
get_raw_msg_by_timestamp_with_chat,
get_raw_msg_by_timestamp_with_chat_inclusive,
get_raw_msg_before_timestamp_with_chat,
num_new_messages_since,
)
import os
import pickle
@@ -106,18 +110,18 @@ class RelationshipProcessor(BaseProcessor):
self.info_fetched_cache: Dict[
str, Dict[str, any]
] = {} # {person_id: {"info": str, "ttl": int, "start_time": float}}
# 新的消息段缓存结构:
# {person_id: [{"start_time": float, "end_time": float, "last_msg_time": float, "message_count": int}, ...]}
self.person_engaged_cache: Dict[str, List[Dict[str, any]]] = {}
# 持久化存储文件路径
self.cache_file_path = os.path.join("data", f"relationship_cache_{self.subheartflow_id}.pkl")
# 最后处理的消息时间,避免重复处理相同消息
current_time = time.time()
self.last_processed_message_time = current_time
# 最后清理时间,用于定期清理老消息段
self.last_cleanup_time = 0.0
@@ -135,7 +139,7 @@ class RelationshipProcessor(BaseProcessor):
name = get_chat_manager().get_stream_name(self.subheartflow_id)
self.log_prefix = f"[{name}] "
# 加载持久化的缓存
self._load_cache()
@@ -143,19 +147,21 @@ class RelationshipProcessor(BaseProcessor):
# 缓存管理模块
# 负责持久化存储、状态管理、缓存读写
# ================================
def _load_cache(self):
"""从文件加载持久化的缓存"""
if os.path.exists(self.cache_file_path):
try:
with open(self.cache_file_path, 'rb') as f:
with open(self.cache_file_path, "rb") as f:
cache_data = pickle.load(f)
# 新格式:包含额外信息的缓存
self.person_engaged_cache = cache_data.get('person_engaged_cache', {})
self.last_processed_message_time = cache_data.get('last_processed_message_time', 0.0)
self.last_cleanup_time = cache_data.get('last_cleanup_time', 0.0)
# 新格式:包含额外信息的缓存
self.person_engaged_cache = cache_data.get("person_engaged_cache", {})
self.last_processed_message_time = cache_data.get("last_processed_message_time", 0.0)
self.last_cleanup_time = cache_data.get("last_cleanup_time", 0.0)
logger.info(f"{self.log_prefix} 成功加载关系缓存,包含 {len(self.person_engaged_cache)} 个用户,最后处理时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_processed_message_time)) if self.last_processed_message_time > 0 else '未设置'}")
logger.info(
f"{self.log_prefix} 成功加载关系缓存,包含 {len(self.person_engaged_cache)} 个用户,最后处理时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_processed_message_time)) if self.last_processed_message_time > 0 else '未设置'}"
)
except Exception as e:
logger.error(f"{self.log_prefix} 加载关系缓存失败: {e}")
self.person_engaged_cache = {}
@@ -168,63 +174,65 @@ class RelationshipProcessor(BaseProcessor):
try:
os.makedirs(os.path.dirname(self.cache_file_path), exist_ok=True)
cache_data = {
'person_engaged_cache': self.person_engaged_cache,
'last_processed_message_time': self.last_processed_message_time,
'last_cleanup_time': self.last_cleanup_time
"person_engaged_cache": self.person_engaged_cache,
"last_processed_message_time": self.last_processed_message_time,
"last_cleanup_time": self.last_cleanup_time,
}
with open(self.cache_file_path, 'wb') as f:
with open(self.cache_file_path, "wb") as f:
pickle.dump(cache_data, f)
logger.debug(f"{self.log_prefix} 成功保存关系缓存")
except Exception as e:
logger.error(f"{self.log_prefix} 保存关系缓存失败: {e}")
# ================================
# 消息段管理模块
# 消息段管理模块
# 负责跟踪用户消息活动、管理消息段、清理过期数据
# ================================
def _update_message_segments(self, person_id: str, message_time: float):
"""更新用户的消息段
Args:
person_id: 用户ID
message_time: 消息时间戳
"""
if person_id not in self.person_engaged_cache:
self.person_engaged_cache[person_id] = []
segments = self.person_engaged_cache[person_id]
current_time = time.time()
# 获取该消息前5条消息的时间作为潜在的开始时间
before_messages = get_raw_msg_before_timestamp_with_chat(self.subheartflow_id, message_time, limit=5)
if before_messages:
# 由于get_raw_msg_before_timestamp_with_chat返回按时间升序排序的消息最后一个是最接近message_time的
# 我们需要第一个消息作为开始时间但应该确保至少包含5条消息或该用户之前的消息
potential_start_time = before_messages[0]['time']
potential_start_time = before_messages[0]["time"]
else:
# 如果没有前面的消息,就从当前消息开始
potential_start_time = message_time
# 如果没有现有消息段,创建新的
if not segments:
new_segment = {
"start_time": potential_start_time,
"end_time": message_time,
"last_msg_time": message_time,
"message_count": self._count_messages_in_timerange(potential_start_time, message_time)
"message_count": self._count_messages_in_timerange(potential_start_time, message_time),
}
segments.append(new_segment)
logger.info(f"{self.log_prefix} 为用户 {person_id} 创建新消息段: 时间范围 {time.strftime('%H:%M:%S', time.localtime(potential_start_time))} - {time.strftime('%H:%M:%S', time.localtime(message_time))}, 消息数: {new_segment['message_count']}")
logger.info(
f"{self.log_prefix} 为用户 {person_id} 创建新消息段: 时间范围 {time.strftime('%H:%M:%S', time.localtime(potential_start_time))} - {time.strftime('%H:%M:%S', time.localtime(message_time))}, 消息数: {new_segment['message_count']}"
)
self._save_cache()
return
# 获取最后一个消息段
last_segment = segments[-1]
# 计算从最后一条消息到当前消息之间的消息数量(不包含边界)
messages_between = self._count_messages_between(last_segment["last_msg_time"], message_time)
if messages_between <= 10:
# 在10条消息内延伸当前消息段
last_segment["end_time"] = message_time
@@ -242,82 +250,82 @@ class RelationshipProcessor(BaseProcessor):
)
if after_messages and len(after_messages) >= 5:
# 如果有足够的后续消息使用第5条消息的时间作为结束时间
last_segment["end_time"] = after_messages[4]['time']
last_segment["end_time"] = after_messages[4]["time"]
else:
# 如果没有足够的后续消息,保持原有的结束时间
pass
# 重新计算当前消息段的消息数量
last_segment["message_count"] = self._count_messages_in_timerange(
last_segment["start_time"], last_segment["end_time"]
)
# 创建新的消息段
new_segment = {
"start_time": potential_start_time,
"end_time": message_time,
"last_msg_time": message_time,
"message_count": self._count_messages_in_timerange(potential_start_time, message_time)
"message_count": self._count_messages_in_timerange(potential_start_time, message_time),
}
segments.append(new_segment)
logger.info(f"{self.log_prefix} 为用户 {person_id} 创建新消息段超过10条消息间隔: {new_segment}")
self._save_cache()
def _count_messages_in_timerange(self, start_time: float, end_time: float) -> int:
"""计算指定时间范围内的消息数量(包含边界)"""
messages = get_raw_msg_by_timestamp_with_chat_inclusive(self.subheartflow_id, start_time, end_time)
return len(messages)
def _count_messages_between(self, start_time: float, end_time: float) -> int:
"""计算两个时间点之间的消息数量(不包含边界),用于间隔检查"""
return num_new_messages_since(self.subheartflow_id, start_time, end_time)
def _get_total_message_count(self, person_id: str) -> int:
"""获取用户所有消息段的总消息数量"""
if person_id not in self.person_engaged_cache:
return 0
total_count = 0
for segment in self.person_engaged_cache[person_id]:
total_count += segment["message_count"]
return total_count
def _cleanup_old_segments(self) -> bool:
"""清理老旧的消息段
Returns:
bool: 是否执行了清理操作
"""
if not SEGMENT_CLEANUP_CONFIG["enable_cleanup"]:
return False
current_time = time.time()
# 检查是否需要执行清理(基于时间间隔)
cleanup_interval_seconds = SEGMENT_CLEANUP_CONFIG["cleanup_interval_hours"] * 3600
if current_time - self.last_cleanup_time < cleanup_interval_seconds:
return False
logger.info(f"{self.log_prefix} 开始执行老消息段清理...")
cleanup_stats = {
"users_cleaned": 0,
"segments_removed": 0,
"total_segments_before": 0,
"total_segments_after": 0
"total_segments_after": 0,
}
max_age_seconds = SEGMENT_CLEANUP_CONFIG["max_segment_age_days"] * 24 * 3600
max_segments_per_user = SEGMENT_CLEANUP_CONFIG["max_segments_per_user"]
users_to_remove = []
for person_id, segments in self.person_engaged_cache.items():
cleanup_stats["total_segments_before"] += len(segments)
original_segment_count = len(segments)
# 1. 按时间清理:移除过期的消息段
segments_after_age_cleanup = []
for segment in segments:
@@ -326,8 +334,10 @@ class RelationshipProcessor(BaseProcessor):
segments_after_age_cleanup.append(segment)
else:
cleanup_stats["segments_removed"] += 1
logger.debug(f"{self.log_prefix} 移除用户 {person_id} 的过期消息段: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['start_time']))} - {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['end_time']))}")
logger.debug(
f"{self.log_prefix} 移除用户 {person_id} 的过期消息段: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['start_time']))} - {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['end_time']))}"
)
# 2. 按数量清理:如果消息段数量仍然过多,保留最新的
if len(segments_after_age_cleanup) > max_segments_per_user:
# 按end_time排序保留最新的
@@ -335,10 +345,12 @@ class RelationshipProcessor(BaseProcessor):
segments_removed_count = len(segments_after_age_cleanup) - max_segments_per_user
cleanup_stats["segments_removed"] += segments_removed_count
segments_after_age_cleanup = segments_after_age_cleanup[:max_segments_per_user]
logger.debug(f"{self.log_prefix} 用户 {person_id} 消息段数量过多,移除 {segments_removed_count} 个最老的消息段")
logger.debug(
f"{self.log_prefix} 用户 {person_id} 消息段数量过多,移除 {segments_removed_count} 个最老的消息段"
)
# 使用清理后的消息段
# 更新缓存
if len(segments_after_age_cleanup) == 0:
# 如果没有剩余消息段,标记用户为待移除
@@ -346,34 +358,38 @@ class RelationshipProcessor(BaseProcessor):
else:
self.person_engaged_cache[person_id] = segments_after_age_cleanup
cleanup_stats["total_segments_after"] += len(segments_after_age_cleanup)
if original_segment_count != len(segments_after_age_cleanup):
cleanup_stats["users_cleaned"] += 1
# 移除没有消息段的用户
for person_id in users_to_remove:
del self.person_engaged_cache[person_id]
logger.debug(f"{self.log_prefix} 移除用户 {person_id}:没有剩余消息段")
# 更新最后清理时间
self.last_cleanup_time = current_time
# 保存缓存
if cleanup_stats["segments_removed"] > 0 or len(users_to_remove) > 0:
self._save_cache()
logger.info(f"{self.log_prefix} 清理完成 - 影响用户: {cleanup_stats['users_cleaned']}, 移除消息段: {cleanup_stats['segments_removed']}, 移除用户: {len(users_to_remove)}")
logger.info(f"{self.log_prefix} 消息段统计 - 清理前: {cleanup_stats['total_segments_before']}, 清理后: {cleanup_stats['total_segments_after']}")
logger.info(
f"{self.log_prefix} 清理完成 - 影响用户: {cleanup_stats['users_cleaned']}, 移除消息段: {cleanup_stats['segments_removed']}, 移除用户: {len(users_to_remove)}"
)
logger.info(
f"{self.log_prefix} 消息段统计 - 清理前: {cleanup_stats['total_segments_before']}, 清理后: {cleanup_stats['total_segments_after']}"
)
else:
logger.debug(f"{self.log_prefix} 清理完成 - 无需清理任何内容")
return cleanup_stats["segments_removed"] > 0 or len(users_to_remove) > 0
def force_cleanup_user_segments(self, person_id: str) -> bool:
"""强制清理指定用户的所有消息段
Args:
person_id: 用户ID
Returns:
bool: 是否成功清理
"""
@@ -389,34 +405,42 @@ class RelationshipProcessor(BaseProcessor):
"""获取缓存状态信息,用于调试和监控"""
if not self.person_engaged_cache:
return f"{self.log_prefix} 关系缓存为空"
status_lines = [f"{self.log_prefix} 关系缓存状态:"]
status_lines.append(f"最后处理消息时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_processed_message_time)) if self.last_processed_message_time > 0 else '未设置'}")
status_lines.append(f"最后清理时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_cleanup_time)) if self.last_cleanup_time > 0 else '执行'}")
status_lines.append(
f"最后处理消息时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_processed_message_time)) if self.last_processed_message_time > 0 else '设置'}"
)
status_lines.append(
f"最后清理时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_cleanup_time)) if self.last_cleanup_time > 0 else '未执行'}"
)
status_lines.append(f"总用户数:{len(self.person_engaged_cache)}")
status_lines.append(f"清理配置:{'启用' if SEGMENT_CLEANUP_CONFIG['enable_cleanup'] else '禁用'} (最大保存{SEGMENT_CLEANUP_CONFIG['max_segment_age_days']}天, 每用户最多{SEGMENT_CLEANUP_CONFIG['max_segments_per_user']}段)")
status_lines.append(
f"清理配置:{'启用' if SEGMENT_CLEANUP_CONFIG['enable_cleanup'] else '禁用'} (最大保存{SEGMENT_CLEANUP_CONFIG['max_segment_age_days']}天, 每用户最多{SEGMENT_CLEANUP_CONFIG['max_segments_per_user']}段)"
)
status_lines.append("")
for person_id, segments in self.person_engaged_cache.items():
total_count = self._get_total_message_count(person_id)
status_lines.append(f"用户 {person_id}:")
status_lines.append(f" 总消息数:{total_count} ({total_count}/45)")
status_lines.append(f" 消息段数:{len(segments)}")
for i, segment in enumerate(segments):
start_str = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['start_time']))
end_str = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['end_time']))
last_str = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['last_msg_time']))
status_lines.append(f"{i+1}: {start_str} -> {end_str} (最后消息: {last_str}, 消息数: {segment['message_count']})")
start_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(segment["start_time"]))
end_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(segment["end_time"]))
last_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(segment["last_msg_time"]))
status_lines.append(
f"{i + 1}: {start_str} -> {end_str} (最后消息: {last_str}, 消息数: {segment['message_count']})"
)
status_lines.append("")
return "\n".join(status_lines)
# ================================
# 主要处理流程
# 统筹各模块协作、对外提供服务接口
# ================================
async def process_info(self, observations: List[Observation] = None, *infos) -> List[InfoBase]:
"""处理信息对象
@@ -446,7 +470,7 @@ class RelationshipProcessor(BaseProcessor):
"""
# 0. 执行定期清理
self._cleanup_old_segments()
# 1. 从观察信息中提取所需数据
# 需要兼容私聊
@@ -456,24 +480,35 @@ class RelationshipProcessor(BaseProcessor):
for observation in observations:
if isinstance(observation, ChattingObservation):
chat_observe_info = observation.get_observe_info()
# 从聊天观察中提取用户信息并更新消息段
# 获取最新的非bot消息来更新消息段
latest_messages = get_raw_msg_by_timestamp_with_chat(
self.subheartflow_id, self.last_processed_message_time, current_time, limit=50 # 获取自上次处理后的消息
self.subheartflow_id,
self.last_processed_message_time,
current_time,
limit=50, # 获取自上次处理后的消息
)
if latest_messages:
# 处理所有新的非bot消息
for latest_msg in latest_messages:
user_id = latest_msg.get('user_id')
platform = latest_msg.get('user_platform') or latest_msg.get('chat_info_platform')
msg_time = latest_msg.get('time', 0)
if user_id and platform and user_id != global_config.bot.qq_account and msg_time > self.last_processed_message_time:
user_id = latest_msg.get("user_id")
platform = latest_msg.get("user_platform") or latest_msg.get("chat_info_platform")
msg_time = latest_msg.get("time", 0)
if (
user_id
and platform
and user_id != global_config.bot.qq_account
and msg_time > self.last_processed_message_time
):
from src.person_info.person_info import PersonInfoManager
person_id = PersonInfoManager.get_person_id(platform, user_id)
self._update_message_segments(person_id, msg_time)
logger.debug(f"{self.log_prefix} 更新用户 {person_id} 的消息段,消息时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg_time))}")
logger.debug(
f"{self.log_prefix} 更新用户 {person_id} 的消息段,消息时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg_time))}"
)
self.last_processed_message_time = max(self.last_processed_message_time, msg_time)
break
@@ -496,9 +531,7 @@ class RelationshipProcessor(BaseProcessor):
for person_id in users_to_build_relationship:
segments = self.person_engaged_cache[person_id]
# 异步执行关系构建
asyncio.create_task(
self.update_impression_on_segments(person_id, self.subheartflow_id, segments)
)
asyncio.create_task(self.update_impression_on_segments(person_id, self.subheartflow_id, segments))
# 移除已处理的用户缓存
del self.person_engaged_cache[person_id]
self._save_cache()
@@ -659,11 +692,11 @@ class RelationshipProcessor(BaseProcessor):
# 关系构建模块
# 负责触发关系构建、整合消息段、更新用户印象
# ================================
async def update_impression_on_segments(self, person_id: str, chat_id: str, segments: List[Dict[str, any]]):
"""
基于消息段更新用户印象
Args:
person_id: 用户ID
chat_id: 聊天ID
@@ -672,17 +705,21 @@ class RelationshipProcessor(BaseProcessor):
logger.info(f"开始为 {person_id} 基于 {len(segments)} 个消息段更新印象")
try:
processed_messages = []
for i, segment in enumerate(segments):
start_time = segment["start_time"]
end_time = segment["end_time"]
message_count = segment["message_count"]
start_date = time.strftime('%Y-%m-%d %H:%M', time.localtime(start_time))
segment["message_count"]
start_date = time.strftime("%Y-%m-%d %H:%M", time.localtime(start_time))
# 获取该段的消息(包含边界)
segment_messages = get_raw_msg_by_timestamp_with_chat_inclusive(self.subheartflow_id, start_time, end_time)
logger.info(f"消息段 {i+1}: {start_date} - {time.strftime('%Y-%m-%d %H:%M', time.localtime(end_time))}, 消息数: {len(segment_messages)}")
segment_messages = get_raw_msg_by_timestamp_with_chat_inclusive(
self.subheartflow_id, start_time, end_time
)
logger.info(
f"消息段 {i + 1}: {start_date} - {time.strftime('%Y-%m-%d %H:%M', time.localtime(end_time))}, 消息数: {len(segment_messages)}"
)
if segment_messages:
# 如果不是第一个消息段,在消息列表前添加间隔标识
if i > 0:
@@ -690,31 +727,29 @@ class RelationshipProcessor(BaseProcessor):
gap_message = {
"time": start_time - 0.1, # 稍微早于段开始时间
"user_id": "system",
"user_platform": "system",
"user_platform": "system",
"user_nickname": "系统",
"user_cardname": "",
"display_message": f"...(中间省略一些消息){start_date} 之后的消息如下...",
"is_action_record": True,
"chat_info_platform": segment_messages[0].get("chat_info_platform", ""),
"chat_id": chat_id
"chat_id": chat_id,
}
processed_messages.append(gap_message)
# 添加该段的所有消息
processed_messages.extend(segment_messages)
if processed_messages:
# 按时间排序所有消息(包括间隔标识)
processed_messages.sort(key=lambda x: x['time'])
processed_messages.sort(key=lambda x: x["time"])
logger.info(f"{person_id} 获取到总共 {len(processed_messages)} 条消息(包含间隔标识)用于印象更新")
relationship_manager = get_relationship_manager()
# 调用原有的更新方法
await relationship_manager.update_person_impression(
person_id=person_id,
timestamp=time.time(),
bot_engaged_messages=processed_messages
person_id=person_id, timestamp=time.time(), bot_engaged_messages=processed_messages
)
else:
logger.info(f"没有找到 {person_id} 的消息段对应的消息,不更新印象")
@@ -727,7 +762,7 @@ class RelationshipProcessor(BaseProcessor):
# 信息调取模块
# 负责实时分析对话需求、提取用户信息、管理信息缓存
# ================================
async def _execute_instant_extraction_batch(self, instant_tasks: list):
"""
批量执行即时提取任务
@@ -919,6 +954,4 @@ class RelationshipProcessor(BaseProcessor):
logger.error(traceback.format_exc())
init_prompt()
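
To make the segment bookkeeping above easier to follow, here is a minimal, self-contained sketch of the rule described in the comments of _update_message_segments, assuming a sorted list of message timestamps stands in for the chat_message_builder helpers (get_raw_msg_before_timestamp_with_chat and friends); update_segments and count_inclusive are hypothetical names used only for illustration:

import bisect
from typing import List

GAP_LIMIT = 10   # more than 10 messages since the segment's last message opens a new segment
LOOKBACK = 5     # a segment starts at the 5th message before the triggering message
LOOKAHEAD = 5    # a closed segment's end is pushed to the 5th message after its last message

def update_segments(segments: List[dict], times: List[float], msg_time: float) -> None:
    def count_inclusive(a: float, b: float) -> int:
        # messages with a <= time <= b
        return bisect.bisect_right(times, b) - bisect.bisect_left(times, a)

    idx = bisect.bisect_left(times, msg_time)
    start = times[max(0, idx - LOOKBACK)] if idx > 0 else msg_time

    if segments:
        last = segments[-1]
        # messages strictly between the segment's last message and the new message
        between = bisect.bisect_left(times, msg_time) - bisect.bisect_right(times, last["last_msg_time"])
        if between <= GAP_LIMIT:
            last["end_time"] = msg_time
            last["last_msg_time"] = msg_time
            last["message_count"] = count_inclusive(last["start_time"], msg_time)
            return
        # close the previous segment: push its end up to LOOKAHEAD messages past its last message
        after = bisect.bisect_right(times, last["last_msg_time"])
        if len(times) - after >= LOOKAHEAD:
            last["end_time"] = times[after + LOOKAHEAD - 1]
        last["message_count"] = count_inclusive(last["start_time"], last["end_time"])

    segments.append({
        "start_time": start,
        "end_time": msg_time,
        "last_msg_time": msg_time,
        "message_count": count_inclusive(start, msg_time),
    })

The real processor additionally persists person_engaged_cache with pickle and sums message_count per user, scheduling an impression update once the total reaches 45.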

View File

@@ -47,7 +47,9 @@ class HFCloopObservation:
action_reasoning = action_result.get("reasoning", "未提供理由")
is_taken = cycle.loop_action_info.get("action_taken", False)
action_taken_time = cycle.loop_action_info.get("taken_time", 0)
action_taken_time_str = datetime.fromtimestamp(action_taken_time).strftime("%H:%M:%S") if action_taken_time > 0 else "未知时间"
action_taken_time_str = (
datetime.fromtimestamp(action_taken_time).strftime("%H:%M:%S") if action_taken_time > 0 else "未知时间"
)
# print(action_type)
# print(action_reasoning)
# print(is_taken)

View File

@@ -2,7 +2,7 @@ import asyncio
import time
import traceback
from random import random
from typing import List, Optional, Dict, Any # 导入类型提示
from typing import List, Optional, Dict # 导入类型提示
import os
import pickle
from maim_message import UserInfo, Seg
@@ -24,7 +24,12 @@ from src.chat.normal_chat.normal_chat_action_modifier import NormalChatActionMod
from src.chat.normal_chat.normal_chat_expressor import NormalChatExpressor
from src.chat.focus_chat.replyer.default_replyer import DefaultReplyer
from src.person_info.person_info import PersonInfoManager
from src.chat.utils.chat_message_builder import get_raw_msg_by_timestamp_with_chat, get_raw_msg_by_timestamp_with_chat_inclusive, get_raw_msg_before_timestamp_with_chat, num_new_messages_since
from src.chat.utils.chat_message_builder import (
get_raw_msg_by_timestamp_with_chat,
get_raw_msg_by_timestamp_with_chat_inclusive,
get_raw_msg_before_timestamp_with_chat,
num_new_messages_since,
)
from src.person_info.relationship_manager import get_relationship_manager
willing_manager = get_willing_manager()
@@ -80,13 +85,13 @@ class NormalChat:
# 新的消息段缓存结构:
# {person_id: [{"start_time": float, "end_time": float, "last_msg_time": float, "message_count": int}, ...]}
self.person_engaged_cache: Dict[str, List[Dict[str, any]]] = {}
# 持久化存储文件路径
self.cache_file_path = os.path.join("data", f"relationship_cache_{self.stream_id}.pkl")
# 最后处理的消息时间,避免重复处理相同消息
self.last_processed_message_time = 0.0
# 最后清理时间,用于定期清理老消息段
self.last_cleanup_time = 0.0
@@ -104,19 +109,21 @@ class NormalChat:
# 缓存管理模块
# 负责持久化存储、状态管理、缓存读写
# ================================
def _load_cache(self):
"""从文件加载持久化的缓存"""
if os.path.exists(self.cache_file_path):
try:
with open(self.cache_file_path, 'rb') as f:
with open(self.cache_file_path, "rb") as f:
cache_data = pickle.load(f)
# 新格式:包含额外信息的缓存
self.person_engaged_cache = cache_data.get('person_engaged_cache', {})
self.last_processed_message_time = cache_data.get('last_processed_message_time', 0.0)
self.last_cleanup_time = cache_data.get('last_cleanup_time', 0.0)
self.person_engaged_cache = cache_data.get("person_engaged_cache", {})
self.last_processed_message_time = cache_data.get("last_processed_message_time", 0.0)
self.last_cleanup_time = cache_data.get("last_cleanup_time", 0.0)
logger.info(f"[{self.stream_name}] 成功加载关系缓存,包含 {len(self.person_engaged_cache)} 个用户,最后处理时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_processed_message_time)) if self.last_processed_message_time > 0 else '未设置'}")
logger.info(
f"[{self.stream_name}] 成功加载关系缓存,包含 {len(self.person_engaged_cache)} 个用户,最后处理时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_processed_message_time)) if self.last_processed_message_time > 0 else '未设置'}"
)
except Exception as e:
logger.error(f"[{self.stream_name}] 加载关系缓存失败: {e}")
self.person_engaged_cache = {}
@@ -129,63 +136,65 @@ class NormalChat:
try:
os.makedirs(os.path.dirname(self.cache_file_path), exist_ok=True)
cache_data = {
'person_engaged_cache': self.person_engaged_cache,
'last_processed_message_time': self.last_processed_message_time,
'last_cleanup_time': self.last_cleanup_time
"person_engaged_cache": self.person_engaged_cache,
"last_processed_message_time": self.last_processed_message_time,
"last_cleanup_time": self.last_cleanup_time,
}
with open(self.cache_file_path, 'wb') as f:
with open(self.cache_file_path, "wb") as f:
pickle.dump(cache_data, f)
logger.debug(f"[{self.stream_name}] 成功保存关系缓存")
except Exception as e:
logger.error(f"[{self.stream_name}] 保存关系缓存失败: {e}")
# ================================
# 消息段管理模块
# 消息段管理模块
# 负责跟踪用户消息活动、管理消息段、清理过期数据
# ================================
def _update_message_segments(self, person_id: str, message_time: float):
"""更新用户的消息段
Args:
person_id: 用户ID
message_time: 消息时间戳
"""
if person_id not in self.person_engaged_cache:
self.person_engaged_cache[person_id] = []
segments = self.person_engaged_cache[person_id]
current_time = time.time()
# 获取该消息前5条消息的时间作为潜在的开始时间
before_messages = get_raw_msg_before_timestamp_with_chat(self.stream_id, message_time, limit=5)
if before_messages:
# 由于get_raw_msg_before_timestamp_with_chat返回按时间升序排序的消息最后一个是最接近message_time的
# 我们需要第一个消息作为开始时间但应该确保至少包含5条消息或该用户之前的消息
potential_start_time = before_messages[0]['time']
potential_start_time = before_messages[0]["time"]
else:
# 如果没有前面的消息,就从当前消息开始
potential_start_time = message_time
# 如果没有现有消息段,创建新的
if not segments:
new_segment = {
"start_time": potential_start_time,
"end_time": message_time,
"last_msg_time": message_time,
"message_count": self._count_messages_in_timerange(potential_start_time, message_time)
"message_count": self._count_messages_in_timerange(potential_start_time, message_time),
}
segments.append(new_segment)
logger.info(f"[{self.stream_name}] 为用户 {person_id} 创建新消息段: 时间范围 {time.strftime('%H:%M:%S', time.localtime(potential_start_time))} - {time.strftime('%H:%M:%S', time.localtime(message_time))}, 消息数: {new_segment['message_count']}")
logger.info(
f"[{self.stream_name}] 为用户 {person_id} 创建新消息段: 时间范围 {time.strftime('%H:%M:%S', time.localtime(potential_start_time))} - {time.strftime('%H:%M:%S', time.localtime(message_time))}, 消息数: {new_segment['message_count']}"
)
self._save_cache()
return
# 获取最后一个消息段
last_segment = segments[-1]
# 计算从最后一条消息到当前消息之间的消息数量(不包含边界)
messages_between = self._count_messages_between(last_segment["last_msg_time"], message_time)
if messages_between <= 10:
# 在10条消息内延伸当前消息段
last_segment["end_time"] = message_time
@@ -203,82 +212,82 @@ class NormalChat:
)
if after_messages and len(after_messages) >= 5:
# 如果有足够的后续消息使用第5条消息的时间作为结束时间
last_segment["end_time"] = after_messages[4]['time']
last_segment["end_time"] = after_messages[4]["time"]
else:
# 如果没有足够的后续消息,保持原有的结束时间
pass
# 重新计算当前消息段的消息数量
last_segment["message_count"] = self._count_messages_in_timerange(
last_segment["start_time"], last_segment["end_time"]
)
# 创建新的消息段
new_segment = {
"start_time": potential_start_time,
"end_time": message_time,
"last_msg_time": message_time,
"message_count": self._count_messages_in_timerange(potential_start_time, message_time)
"message_count": self._count_messages_in_timerange(potential_start_time, message_time),
}
segments.append(new_segment)
logger.info(f"[{self.stream_name}] 为用户 {person_id} 创建新消息段超过10条消息间隔: {new_segment}")
self._save_cache()
def _count_messages_in_timerange(self, start_time: float, end_time: float) -> int:
"""计算指定时间范围内的消息数量(包含边界)"""
messages = get_raw_msg_by_timestamp_with_chat_inclusive(self.stream_id, start_time, end_time)
return len(messages)
def _count_messages_between(self, start_time: float, end_time: float) -> int:
"""计算两个时间点之间的消息数量(不包含边界),用于间隔检查"""
return num_new_messages_since(self.stream_id, start_time, end_time)
def _get_total_message_count(self, person_id: str) -> int:
"""获取用户所有消息段的总消息数量"""
if person_id not in self.person_engaged_cache:
return 0
total_count = 0
for segment in self.person_engaged_cache[person_id]:
total_count += segment["message_count"]
return total_count
def _cleanup_old_segments(self) -> bool:
"""清理老旧的消息段
Returns:
bool: 是否执行了清理操作
"""
if not SEGMENT_CLEANUP_CONFIG["enable_cleanup"]:
return False
current_time = time.time()
# 检查是否需要执行清理(基于时间间隔)
cleanup_interval_seconds = SEGMENT_CLEANUP_CONFIG["cleanup_interval_hours"] * 3600
if current_time - self.last_cleanup_time < cleanup_interval_seconds:
return False
logger.info(f"[{self.stream_name}] 开始执行老消息段清理...")
cleanup_stats = {
"users_cleaned": 0,
"segments_removed": 0,
"total_segments_before": 0,
"total_segments_after": 0
"total_segments_after": 0,
}
max_age_seconds = SEGMENT_CLEANUP_CONFIG["max_segment_age_days"] * 24 * 3600
max_segments_per_user = SEGMENT_CLEANUP_CONFIG["max_segments_per_user"]
users_to_remove = []
for person_id, segments in self.person_engaged_cache.items():
cleanup_stats["total_segments_before"] += len(segments)
original_segment_count = len(segments)
# 1. 按时间清理:移除过期的消息段
segments_after_age_cleanup = []
for segment in segments:
@@ -287,8 +296,10 @@ class NormalChat:
segments_after_age_cleanup.append(segment)
else:
cleanup_stats["segments_removed"] += 1
logger.debug(f"[{self.stream_name}] 移除用户 {person_id} 的过期消息段: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['start_time']))} - {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['end_time']))}")
logger.debug(
f"[{self.stream_name}] 移除用户 {person_id} 的过期消息段: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['start_time']))} - {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['end_time']))}"
)
# 2. 按数量清理:如果消息段数量仍然过多,保留最新的
if len(segments_after_age_cleanup) > max_segments_per_user:
# 按end_time排序保留最新的
@@ -296,10 +307,12 @@ class NormalChat:
segments_removed_count = len(segments_after_age_cleanup) - max_segments_per_user
cleanup_stats["segments_removed"] += segments_removed_count
segments_after_age_cleanup = segments_after_age_cleanup[:max_segments_per_user]
logger.debug(f"[{self.stream_name}] 用户 {person_id} 消息段数量过多,移除 {segments_removed_count} 个最老的消息段")
logger.debug(
f"[{self.stream_name}] 用户 {person_id} 消息段数量过多,移除 {segments_removed_count} 个最老的消息段"
)
# 使用清理后的消息段
# 更新缓存
if len(segments_after_age_cleanup) == 0:
# 如果没有剩余消息段,标记用户为待移除
@@ -307,76 +320,90 @@ class NormalChat:
else:
self.person_engaged_cache[person_id] = segments_after_age_cleanup
cleanup_stats["total_segments_after"] += len(segments_after_age_cleanup)
if original_segment_count != len(segments_after_age_cleanup):
cleanup_stats["users_cleaned"] += 1
# 移除没有消息段的用户
for person_id in users_to_remove:
del self.person_engaged_cache[person_id]
logger.debug(f"[{self.stream_name}] 移除用户 {person_id}:没有剩余消息段")
# 更新最后清理时间
self.last_cleanup_time = current_time
# 保存缓存
if cleanup_stats["segments_removed"] > 0 or len(users_to_remove) > 0:
self._save_cache()
logger.info(f"[{self.stream_name}] 清理完成 - 影响用户: {cleanup_stats['users_cleaned']}, 移除消息段: {cleanup_stats['segments_removed']}, 移除用户: {len(users_to_remove)}")
logger.info(f"[{self.stream_name}] 消息段统计 - 清理前: {cleanup_stats['total_segments_before']}, 清理后: {cleanup_stats['total_segments_after']}")
logger.info(
f"[{self.stream_name}] 清理完成 - 影响用户: {cleanup_stats['users_cleaned']}, 移除消息段: {cleanup_stats['segments_removed']}, 移除用户: {len(users_to_remove)}"
)
logger.info(
f"[{self.stream_name}] 消息段统计 - 清理前: {cleanup_stats['total_segments_before']}, 清理后: {cleanup_stats['total_segments_after']}"
)
else:
logger.debug(f"[{self.stream_name}] 清理完成 - 无需清理任何内容")
return cleanup_stats["segments_removed"] > 0 or len(users_to_remove) > 0
def get_cache_status(self) -> str:
"""获取缓存状态信息,用于调试和监控"""
if not self.person_engaged_cache:
return f"[{self.stream_name}] 关系缓存为空"
status_lines = [f"[{self.stream_name}] 关系缓存状态:"]
status_lines.append(f"最后处理消息时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_processed_message_time)) if self.last_processed_message_time > 0 else '未设置'}")
status_lines.append(f"最后清理时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_cleanup_time)) if self.last_cleanup_time > 0 else '执行'}")
status_lines.append(
f"最后处理消息时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_processed_message_time)) if self.last_processed_message_time > 0 else '设置'}"
)
status_lines.append(
f"最后清理时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.last_cleanup_time)) if self.last_cleanup_time > 0 else '未执行'}"
)
status_lines.append(f"总用户数:{len(self.person_engaged_cache)}")
status_lines.append(f"清理配置:{'启用' if SEGMENT_CLEANUP_CONFIG['enable_cleanup'] else '禁用'} (最大保存{SEGMENT_CLEANUP_CONFIG['max_segment_age_days']}天, 每用户最多{SEGMENT_CLEANUP_CONFIG['max_segments_per_user']}段)")
status_lines.append(
f"清理配置:{'启用' if SEGMENT_CLEANUP_CONFIG['enable_cleanup'] else '禁用'} (最大保存{SEGMENT_CLEANUP_CONFIG['max_segment_age_days']}天, 每用户最多{SEGMENT_CLEANUP_CONFIG['max_segments_per_user']}段)"
)
status_lines.append("")
for person_id, segments in self.person_engaged_cache.items():
total_count = self._get_total_message_count(person_id)
status_lines.append(f"用户 {person_id}:")
status_lines.append(f" 总消息数:{total_count} ({total_count}/45)")
status_lines.append(f" 消息段数:{len(segments)}")
for i, segment in enumerate(segments):
start_str = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['start_time']))
end_str = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['end_time']))
last_str = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(segment['last_msg_time']))
status_lines.append(f"{i+1}: {start_str} -> {end_str} (最后消息: {last_str}, 消息数: {segment['message_count']})")
start_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(segment["start_time"]))
end_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(segment["end_time"]))
last_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(segment["last_msg_time"]))
status_lines.append(
f"{i + 1}: {start_str} -> {end_str} (最后消息: {last_str}, 消息数: {segment['message_count']})"
)
status_lines.append("")
return "\n".join(status_lines)
def _update_user_message_segments(self, message: MessageRecv):
"""更新用户消息段信息"""
current_time = time.time()
time.time()
user_id = message.message_info.user_info.user_id
platform = message.message_info.platform
msg_time = message.message_info.time
# 跳过机器人自己的消息
if user_id == global_config.bot.qq_account:
return
# 只处理新消息(避免重复处理)
if msg_time <= self.last_processed_message_time:
return
person_id = PersonInfoManager.get_person_id(platform, user_id)
self._update_message_segments(person_id, msg_time)
# 更新最后处理时间
self.last_processed_message_time = max(self.last_processed_message_time, msg_time)
logger.debug(f"[{self.stream_name}] 更新用户 {person_id} 的消息段,消息时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg_time))}")
logger.debug(
f"[{self.stream_name}] 更新用户 {person_id} 的消息段,消息时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg_time))}"
)
# 改为实例方法
async def _create_thinking_message(self, message: MessageRecv, timestamp: Optional[float] = None) -> str:
@@ -585,7 +612,7 @@ class NormalChat:
# 执行定期清理
self._cleanup_old_segments()
# 更新消息段信息
self._update_user_message_segments(message)
@@ -1072,12 +1099,10 @@ class NormalChat:
"""获取动作管理器实例"""
return self.action_manager
async def _check_relation_building_conditions(self):
"""检查person_engaged_cache中是否有满足关系构建条件的用户"""
users_to_build_relationship = []
for person_id, segments in list(self.person_engaged_cache.items()):
total_message_count = self._get_total_message_count(person_id)
if total_message_count >= 45:
@@ -1095,9 +1120,7 @@ class NormalChat:
for person_id in users_to_build_relationship:
segments = self.person_engaged_cache[person_id]
# 异步执行关系构建
asyncio.create_task(
self._build_relation_for_person_segments(person_id, segments)
)
asyncio.create_task(self._build_relation_for_person_segments(person_id, segments))
# 移除已处理的用户缓存
del self.person_engaged_cache[person_id]
self._save_cache()
@@ -1108,17 +1131,19 @@ class NormalChat:
logger.info(f"[{self.stream_name}] 开始为 {person_id} 基于 {len(segments)} 个消息段更新印象")
try:
processed_messages = []
for i, segment in enumerate(segments):
start_time = segment["start_time"]
end_time = segment["end_time"]
message_count = segment["message_count"]
start_date = time.strftime('%Y-%m-%d %H:%M', time.localtime(start_time))
segment["message_count"]
start_date = time.strftime("%Y-%m-%d %H:%M", time.localtime(start_time))
# 获取该段的消息(包含边界)
segment_messages = get_raw_msg_by_timestamp_with_chat_inclusive(self.stream_id, start_time, end_time)
logger.info(f"[{self.stream_name}] 消息段 {i+1}: {start_date} - {time.strftime('%Y-%m-%d %H:%M', time.localtime(end_time))}, 消息数: {len(segment_messages)}")
logger.info(
f"[{self.stream_name}] 消息段 {i + 1}: {start_date} - {time.strftime('%Y-%m-%d %H:%M', time.localtime(end_time))}, 消息数: {len(segment_messages)}"
)
if segment_messages:
# 如果不是第一个消息段,在消息列表前添加间隔标识
if i > 0:
@@ -1126,33 +1151,33 @@ class NormalChat:
gap_message = {
"time": start_time - 0.1, # 稍微早于段开始时间
"user_id": "system",
"user_platform": "system",
"user_platform": "system",
"user_nickname": "系统",
"user_cardname": "",
"display_message": f"...(中间省略一些消息){start_date} 之后的消息如下...",
"is_action_record": True,
"chat_info_platform": segment_messages[0].get("chat_info_platform", ""),
"chat_id": self.stream_id
"chat_id": self.stream_id,
}
processed_messages.append(gap_message)
# 添加该段的所有消息
processed_messages.extend(segment_messages)
if processed_messages:
# 按时间排序所有消息(包括间隔标识)
processed_messages.sort(key=lambda x: x['time'])
logger.info(f"[{self.stream_name}] 为 {person_id} 获取到总共 {len(processed_messages)} 条消息(包含间隔标识)用于印象更新")
processed_messages.sort(key=lambda x: x["time"])
logger.info(
f"[{self.stream_name}] 为 {person_id} 获取到总共 {len(processed_messages)} 条消息(包含间隔标识)用于印象更新"
)
relationship_manager = get_relationship_manager()
# 调用原有的更新方法
await relationship_manager.update_person_impression(
person_id=person_id,
timestamp=time.time(),
bot_engaged_messages=processed_messages
person_id=person_id, timestamp=time.time(), bot_engaged_messages=processed_messages
)
logger.info(f"[{self.stream_name}] 用户 {person_id} 关系构建完成")
else:
logger.warning(f"[{self.stream_name}] 没有找到 {person_id} 的消息段对应的消息,不更新印象")