🤖 Auto-format code [skip ci]
@@ -536,14 +536,16 @@ class RelationshipProcessor(BaseProcessor):
             # 对于每个(person_id, info_type)组合,只保留最新的记录
             latest_records = {}
             for info_fetching in self.info_fetching_cache:
-                key = (info_fetching['person_id'], info_fetching['info_type'])
-                if key not in latest_records or info_fetching['start_time'] > latest_records[key]['start_time']:
+                key = (info_fetching["person_id"], info_fetching["info_type"])
+                if key not in latest_records or info_fetching["start_time"] > latest_records[key]["start_time"]:
                     latest_records[key] = info_fetching

             # 按时间排序并生成显示文本
-            sorted_records = sorted(latest_records.values(), key=lambda x: x['start_time'])
+            sorted_records = sorted(latest_records.values(), key=lambda x: x["start_time"])
             for info_fetching in sorted_records:
-                info_cache_block += f"你已经调取了[{info_fetching['person_name']}]的[{info_fetching['info_type']}]信息\n"
+                info_cache_block += (
+                    f"你已经调取了[{info_fetching['person_name']}]的[{info_fetching['info_type']}]信息\n"
+                )

         prompt = (await global_prompt_manager.get_prompt_async("relationship_prompt")).format(
             name_block=name_block,
@@ -625,7 +627,6 @@ class RelationshipProcessor(BaseProcessor):
             if person_infos_str:
                 persons_infos_str += f"你对 {person_name} 的了解:{person_infos_str}\n"

-
         return persons_infos_str

     # ================================
@@ -747,9 +748,8 @@ class RelationshipProcessor(BaseProcessor):
                     logger.info(f"{self.log_prefix} [缓存命中] 从 info_list 中找到 {info_type} 信息: {cached_info}")
                     break

-
         # 如果缓存中有信息,直接使用
-        if cached_info :
+        if cached_info:
             person_name = await person_info_manager.get_value(person_id, "person_name")
             if person_id not in self.info_fetched_cache:
                 self.info_fetched_cache[person_id] = {}
@@ -777,7 +777,9 @@ class RelationshipProcessor(BaseProcessor):
         person_name = await person_info_manager.get_value(person_id, "person_name")
         person_impression = await person_info_manager.get_value(person_id, "impression")
         if person_impression:
-            person_impression_block = f"<对{person_name}的总体了解>\n{person_impression}\n</对{person_name}的总体了解>"
+            person_impression_block = (
+                f"<对{person_name}的总体了解>\n{person_impression}\n</对{person_name}的总体了解>"
+            )
         else:
             person_impression_block = ""

@@ -840,11 +842,15 @@ class RelationshipProcessor(BaseProcessor):
                     await self._save_info_to_cache(person_id, info_type, info_content if not is_unknown else "none")

                     if not is_unknown:
-                        logger.info(f"{self.log_prefix} [LLM提取] 成功获取并缓存 {person_name} 的 {info_type}: {info_content}")
+                        logger.info(
+                            f"{self.log_prefix} [LLM提取] 成功获取并缓存 {person_name} 的 {info_type}: {info_content}"
+                        )
                     else:
                         logger.info(f"{self.log_prefix} [LLM提取] {person_name} 的 {info_type} 信息不明确")
                 else:
-                    logger.warning(f"{self.log_prefix} [LLM提取] 小模型返回空结果,获取 {person_name} 的 {info_type} 信息失败。")
+                    logger.warning(
+                        f"{self.log_prefix} [LLM提取] 小模型返回空结果,获取 {person_name} 的 {info_type} 信息失败。"
+                    )
             except Exception as e:
                 logger.error(f"{self.log_prefix} [LLM提取] 执行小模型请求获取用户信息时出错: {e}")
                 logger.error(traceback.format_exc())
@@ -894,5 +900,4 @@ class RelationshipProcessor(BaseProcessor):
             logger.error(traceback.format_exc())


-
 init_prompt()
@@ -18,7 +18,6 @@ logger = get_logger("processor")


 def init_prompt():
-
     indentify_prompt = """
 {time_now},以下是正在进行的聊天内容:
 <聊天记录>
@@ -57,8 +56,6 @@ class SelfProcessor(BaseProcessor):
         name = get_chat_manager().get_stream_name(self.subheartflow_id)
         self.log_prefix = f"[{name}] "

-
-
     async def process_info(self, observations: List[Observation] = None, *infos) -> List[InfoBase]:
         """处理信息对象

@@ -129,7 +126,7 @@ class SelfProcessor(BaseProcessor):
             time_now=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
             chat_observe_info=chat_observe_info[-200:],
             available_keywords=available_keywords_str,
-            bot_name = global_config.bot.nickname
+            bot_name=global_config.bot.nickname,
         )

         keyword = ""
@@ -147,7 +144,6 @@ class SelfProcessor(BaseProcessor):
             logger.error(traceback.format_exc())
             keyword = "我是谁,我从哪来,要到哪去"

-
         # 解析关键词
         keyword = keyword.strip()
         if not keyword or keyword == "none":
@@ -181,7 +177,6 @@ class SelfProcessor(BaseProcessor):
         for expired_keyword in expired_keywords:
             del self.info_fetched_cache[expired_keyword]

-
         fetched_info_str = ""
         for keyword, info in self.info_fetched_cache.items():
             fetched_info_str += f"你的:{keyword}信息是: {info['info']}\n"
@@ -189,5 +184,4 @@ class SelfProcessor(BaseProcessor):
         return fetched_info_str


-
 init_prompt()
@@ -342,11 +342,8 @@ class ActionPlanner(BasePlanner):
         else:
             chat_content_block = "你还未开始聊天"

-
         action_options_block = ""
         for using_actions_name, using_actions_info in current_available_actions.items():
-
-
             using_action_prompt = await global_prompt_manager.get_prompt_async("action_prompt")

             if using_actions_info["parameters"]:
@@ -7,7 +7,6 @@ import json
 import os
 import hashlib
 import traceback
 import time
 from rich.traceback import install
 from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
 from src.manager.async_task_manager import AsyncTask
@@ -19,11 +18,10 @@ install(extra_lines=3)

 logger = get_logger("individuality")


 def init_prompt():
     """初始化用于关键词提取的prompts"""

-

     extract_keywords_prompt = """
 请分析以下对某人的描述,提取出其中的独立关键词。每个关键词应该是可以用来从某一角度概括的方面:性格,身高,喜好,外貌,身份,兴趣,爱好,习惯,等等。
@@ -49,7 +47,6 @@ def init_prompt():
     Prompt(fetch_info_prompt, "fetch_info_prompt")


-
 class Individuality:
     """个体特征管理类"""

@@ -265,18 +262,17 @@ class Individuality:
     def _get_config_hash(self, personality_sides: list, identity_detail: list) -> str:
         """获取当前personality和identity配置的哈希值"""
         # 将配置转换为字符串并排序,确保一致性
-        config_str = json.dumps({
-            "personality_sides": sorted(personality_sides),
-            "identity_detail": sorted(identity_detail)
-        }, sort_keys=True)
+        config_str = json.dumps(
+            {"personality_sides": sorted(personality_sides), "identity_detail": sorted(identity_detail)}, sort_keys=True
+        )

-        return hashlib.md5(config_str.encode('utf-8')).hexdigest()
+        return hashlib.md5(config_str.encode("utf-8")).hexdigest()

     def _load_meta_info(self) -> dict:
         """从JSON文件中加载元信息"""
         if os.path.exists(self.meta_info_file_path):
             try:
-                with open(self.meta_info_file_path, 'r', encoding='utf-8') as f:
+                with open(self.meta_info_file_path, "r", encoding="utf-8") as f:
                     return json.load(f)
             except Exception as e:
                 print(f"读取meta_info文件失败: {e}")
@@ -288,7 +284,7 @@ class Individuality:
         try:
             # 确保目录存在
             os.makedirs(os.path.dirname(self.meta_info_file_path), exist_ok=True)
-            with open(self.meta_info_file_path, 'w', encoding='utf-8') as f:
+            with open(self.meta_info_file_path, "w", encoding="utf-8") as f:
                 json.dump(meta_info, f, ensure_ascii=False, indent=2)
         except Exception as e:
             print(f"保存meta_info文件失败: {e}")
@@ -301,26 +297,26 @@ class Individuality:
         stored_config_hash = meta_info.get("config_hash", "")

         if current_config_hash != stored_config_hash:
-            logger.info(f"检测到personality或identity配置发生变化,清空fetch_info数据")
+            logger.info("检测到personality或identity配置发生变化,清空fetch_info数据")

             # 清空fetch_info文件
             if os.path.exists(self.fetch_info_file_path):
                 try:
                     os.remove(self.fetch_info_file_path)
-                    logger.info(f"已清空fetch_info文件")
+                    logger.info("已清空fetch_info文件")
                 except Exception as e:
                     logger.error(f"清空fetch_info文件失败: {e}")

             # 更新元信息
             meta_info["config_hash"] = current_config_hash
             self._save_meta_info(meta_info)
-            logger.info(f"已更新配置哈希值")
+            logger.info("已更新配置哈希值")

     def _load_fetch_info_from_file(self) -> dict:
         """从JSON文件中加载已保存的fetch_info数据"""
         if os.path.exists(self.fetch_info_file_path):
             try:
-                with open(self.fetch_info_file_path, 'r', encoding='utf-8') as f:
+                with open(self.fetch_info_file_path, "r", encoding="utf-8") as f:
                     data = json.load(f)
                     # 兼容旧格式:如果是字符串则转换为列表
                     for keyword, value in data.items():
@@ -337,7 +333,7 @@ class Individuality:
         try:
             # 确保目录存在
             os.makedirs(os.path.dirname(self.fetch_info_file_path), exist_ok=True)
-            with open(self.fetch_info_file_path, 'w', encoding='utf-8') as f:
+            with open(self.fetch_info_file_path, "w", encoding="utf-8") as f:
                 json.dump(fetch_info_data, f, ensure_ascii=False, indent=2)
         except Exception as e:
             logger.error(f"保存fetch_info文件失败: {e}")
@@ -345,7 +341,6 @@ class Individuality:
     async def _preprocess_personality_keywords(self, personality_sides: list, identity_detail: list):
         """预处理personality关键词,提取关键词并生成缓存"""
         try:
-
             logger.info("开始预处理personality关键词...")

             # 检查配置变化
@@ -452,7 +447,7 @@ class Individuality:
                 prompt_personality=personality_sides_str,
                 indentify_block=identity_detail_str,
                 keyword=keyword,
-                bot_name=self.name
+                bot_name=self.name,
             )

             fetched_info, _ = await llm_model.generate_response_async(prompt=fetch_prompt)
@@ -478,7 +473,9 @@ class Individuality:
             # 保存合并后的数据到文件和内存缓存
             if updated_count > 0 or new_count > 0:
                 self._save_fetch_info_to_file(fetch_info_data)
-                logger.info(f"预处理完成,新增 {new_count} 个关键词,追加 {updated_count} 个关键词信息,总计 {len(fetch_info_data)} 个关键词")
+                logger.info(
+                    f"预处理完成,新增 {new_count} 个关键词,追加 {updated_count} 个关键词信息,总计 {len(fetch_info_data)} 个关键词"
+                )
             else:
                 logger.info("预处理完成,但没有生成任何新的有效信息")

@@ -488,6 +485,7 @@ class Individuality:

             # 注册定时任务(延迟执行,避免阻塞初始化)
             import asyncio
+
             asyncio.create_task(self._register_keyword_update_task_delayed())

         except Exception as e:
@@ -499,16 +497,18 @@ class Individuality:
         try:
             # 等待一小段时间确保系统完全初始化
             import asyncio
+
             await asyncio.sleep(5)

             from src.manager.async_task_manager import async_task_manager
+
             logger = get_logger("individuality")

             # 创建定时任务
             task = KeywordUpdateTask(
                 personality_sides=list(global_config.personality.personality_sides),
                 identity_detail=list(global_config.identity.identity_detail),
-                individuality_instance=self
+                individuality_instance=self,
             )

             # 注册任务
@@ -555,7 +555,7 @@ class KeywordUpdateTask(AsyncTask):
         super().__init__(
             task_name="keyword_update_task",
             wait_before_start=3600,  # 1小时后开始
-            run_interval=3600  # 每小时运行一次
+            run_interval=3600,  # 每小时运行一次
         )

         self.personality_sides = personality_sides
@@ -571,6 +571,7 @@ class KeywordUpdateTask(AsyncTask):
         """执行任务"""
         try:
             from src.common.logger import get_logger
+
             logger = get_logger("individuality.task")

             # 检查是否超过最大运行次数
@@ -284,10 +284,9 @@ class RelationshipManager:
                 # 添加可读时间到每个point
                 points_list = [(item["point"], float(item["weight"]), current_time) for item in points_data]

-
-                logger_str=f"了解了有关{person_name}的新印象:\n"
+                logger_str = f"了解了有关{person_name}的新印象:\n"
                 for point in points_list:
-                    logger_str+=f"{point[0]},重要性:{point[1]}\n\n"
+                    logger_str += f"{point[0]},重要性:{point[1]}\n\n"
                 logger.info("logger_str")

             except json.JSONDecodeError:
@@ -476,7 +475,6 @@ class RelationshipManager:
                 person_id, "forgotten_points", json.dumps(forgotten_points, ensure_ascii=False, indent=None)
             )

-
             # 更新数据库
             await person_info_manager.update_one_field(
                 person_id, "points", json.dumps(current_points, ensure_ascii=False, indent=None)