feat: merge the self processor and the relationship processor
@@ -23,7 +23,6 @@ from src.chat.heart_flow.observation.actions_observation import ActionObservation
 from src.chat.focus_chat.info_processors.tool_processor import ToolProcessor
 from src.chat.focus_chat.memory_activator import MemoryActivator
 from src.chat.focus_chat.info_processors.base_processor import BaseProcessor
-from src.chat.focus_chat.info_processors.self_processor import SelfProcessor
 from src.chat.focus_chat.info_processors.expression_selector_processor import ExpressionSelectorProcessor
 from src.chat.focus_chat.planners.planner_factory import PlannerFactory
 from src.chat.focus_chat.planners.modify_actions import ActionModifier
@@ -45,7 +44,6 @@ PROCESSOR_CLASSES = {
     "ChattingInfoProcessor": (ChattingInfoProcessor, None),
     "ToolProcessor": (ToolProcessor, "tool_use_processor"),
     "WorkingMemoryProcessor": (WorkingMemoryProcessor, "working_memory_processor"),
-    "SelfProcessor": (SelfProcessor, "self_identify_processor"),
     "RelationshipProcessor": (RelationshipProcessor, "relation_processor"),
     "ExpressionSelectorProcessor": (ExpressionSelectorProcessor, "expression_selector_processor"),
 }
@@ -184,7 +182,6 @@ class HeartFChatting:
             if name in [
                 "ToolProcessor",
                 "WorkingMemoryProcessor",
-                "SelfProcessor",
                 "RelationshipProcessor",
                 "ExpressionSelectorProcessor",
             ]:
 
@@ -13,6 +13,7 @@ from typing import List
 from typing import Dict
 from src.chat.focus_chat.info.info_base import InfoBase
 from src.chat.focus_chat.info.relation_info import RelationInfo
+from src.person_info.person_info import PersonInfoManager
 from json_repair import repair_json
 from src.person_info.person_info import get_person_info_manager
 import json
@@ -48,10 +49,11 @@ def init_prompt():
 请不要重复调取相同的信息
 
 {name_block}
-请你阅读聊天记录,查看是否需要调取某个人的信息,这个人可以是出现在聊天记录中的,也可以是记录中提到的人。
+请你阅读聊天记录,查看是否需要调取某个人的信息,这个人可以是出现在聊天记录中的,也可以是记录中提到的人,也可以是你自己({bot_name})。
 你不同程度上认识群聊里的人,以及他们谈论到的人,你可以根据聊天记录,回忆起有关他们的信息,帮助你参与聊天
 1.你需要提供用户名和你想要提取的信息名称类型来进行调取
 2.请注意,提取的信息类型一定要和用户有关,不要提取无关的信息
+3.你也可以调取有关自己({bot_name})的信息
 
 请以json格式输出,例如:
 
@@ -59,7 +61,7 @@ def init_prompt():
 "用户A": "ta的昵称",
 "用户B": "ta对你的态度",
 "用户D": "你对ta的印象",
-"person_name": "其他信息",
+"{bot_name}": "身份",
 "person_name": "其他信息",
 }}
 
@@ -81,6 +83,18 @@ def init_prompt():
 """
     Prompt(fetch_info_prompt, "fetch_person_info_prompt")
 
+    fetch_bot_info_prompt = """
+你是{nickname},你的昵称有{alias_names}。
+以下是你对自己的了解,请你从中提取和"{info_type}"有关的信息,如果无法提取,请输出none:
+{person_impression_block}
+{points_text_block}
+请严格按照以下json输出格式,不要输出多余内容:
+{{
+"{info_type}": "有关你自己的{info_type}的信息内容"
+}}
+"""
+    Prompt(fetch_bot_info_prompt, "fetch_bot_info_prompt")
+
 
 class RelationshipProcessor(BaseProcessor):
     log_prefix = "关系"
@@ -549,6 +563,7 @@ class RelationshipProcessor(BaseProcessor):
 
         prompt = (await global_prompt_manager.get_prompt_async("relationship_prompt")).format(
             name_block=name_block,
+            bot_name=global_config.bot.nickname,
             time_now=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
             chat_observe_info=chat_observe_info,
             info_cache_block=info_cache_block,
@@ -567,17 +582,17 @@
 
         person_info_manager = get_person_info_manager()
         for person_name, info_type in content_json.items():
-            person_id = person_info_manager.get_person_id_by_person_name(person_name)
+            is_bot = person_name == global_config.bot.nickname or person_name in global_config.bot.alias_names
+            if is_bot:
+                person_id = person_info_manager.get_person_id("system", "bot_id")
+                logger.info(f"{self.log_prefix} 检测到对bot自身({person_name})的信息查询,使用特殊ID。")
+            else:
+                person_id = person_info_manager.get_person_id_by_person_name(person_name)
+
             if not person_id:
                 logger.warning(f"{self.log_prefix} 未找到用户 {person_name} 的ID,跳过调取信息。")
                 continue
-
-            # 检查是否是bot自己,如果是则跳过
-            user_id = person_info_manager.get_value_sync(person_id, "user_id")
-            if user_id == global_config.bot.qq_account:
-                logger.info(f"{self.log_prefix} 跳过调取bot自己({person_name})的信息。")
-                continue
 
             self.info_fetching_cache.append(
                 {
                     "person_id": person_id,
@@ -747,42 +762,37 @@
         # 首先检查 info_list 缓存
         info_list = await person_info_manager.get_value(person_id, "info_list") or []
         cached_info = None
+        person_name = await person_info_manager.get_value(person_id, "person_name")
 
-        print(f"info_list: {info_list}")
+        # print(f"info_list: {info_list}")
 
         # 查找对应的 info_type
         for info_item in info_list:
             if info_item.get("info_type") == info_type:
                 cached_info = info_item.get("info_content")
-                logger.info(f"{self.log_prefix} [缓存命中] 从 info_list 中找到 {info_type} 信息: {cached_info}")
+                logger.debug(f"{self.log_prefix} 在info_list中找到 {person_name} 的 {info_type} 信息: {cached_info}")
                 break
 
         # 如果缓存中有信息,直接使用
         if cached_info:
-            person_name = await person_info_manager.get_value(person_id, "person_name")
             if person_id not in self.info_fetched_cache:
                 self.info_fetched_cache[person_id] = {}
 
-            if cached_info == "none":
-                unknow = True
-            else:
-                unknow = False
-
             self.info_fetched_cache[person_id][info_type] = {
                 "info": cached_info,
-                "ttl": 8,
+                "ttl": 4,
                 "start_time": start_time,
                 "person_name": person_name,
-                "unknow": unknow,
+                "unknow": cached_info == "none",
             }
-            logger.info(f"{self.log_prefix} [缓存使用] 直接使用缓存的 {person_name} 的 {info_type}: {cached_info}")
+            logger.info(f"{self.log_prefix} 记得 {person_name} 的 {info_type}: {cached_info}")
             return
 
-        logger.info(f"{self.log_prefix} [缓存命中] 缓存中没有信息")
-
+        bot_person_id = PersonInfoManager.get_person_id("system", "bot_id")
+        is_bot = person_id == bot_person_id
 
         try:
-            nickname_str = ",".join(global_config.bot.alias_names)
-            name_block = f"你的名字是{global_config.bot.nickname},你的昵称有{nickname_str},有人也会用这些昵称称呼你。"
-            person_name = await person_info_manager.get_value(person_id, "person_name")
             person_impression = await person_info_manager.get_value(person_id, "impression")
             if person_impression:
@@ -804,31 +814,43 @@
                     self.info_fetched_cache[person_id] = {}
                 self.info_fetched_cache[person_id][info_type] = {
                     "info": "none",
-                    "ttl": 8,
+                    "ttl": 4,
                     "start_time": start_time,
                     "person_name": person_name,
                     "unknow": True,
                 }
                 logger.info(f"{self.log_prefix} 完全不认识 {person_name}")
                 await self._save_info_to_cache(person_id, info_type, "none")
                 return
 
-            prompt = (await global_prompt_manager.get_prompt_async("fetch_person_info_prompt")).format(
-                name_block=name_block,
-                info_type=info_type,
-                person_impression_block=person_impression_block,
-                person_name=person_name,
-                info_json_str=f'"{info_type}": "有关{info_type}的信息内容"',
-                points_text_block=points_text_block,
-            )
+            if is_bot:
+                prompt = (await global_prompt_manager.get_prompt_async("fetch_bot_info_prompt")).format(
+                    nickname=global_config.bot.nickname,
+                    alias_names=",".join(global_config.bot.alias_names),
+                    info_type=info_type,
+                    person_impression_block=person_impression_block,
+                    points_text_block=points_text_block,
+                )
+            else:
+                nickname_str = ",".join(global_config.bot.alias_names)
+                name_block = f"你的名字是{global_config.bot.nickname},你的昵称有{nickname_str},有人也会用这些昵称称呼你。"
+                prompt = (await global_prompt_manager.get_prompt_async("fetch_person_info_prompt")).format(
+                    name_block=name_block,
+                    info_type=info_type,
+                    person_impression_block=person_impression_block,
+                    person_name=person_name,
+                    info_json_str=f'"{info_type}": "有关{info_type}的信息内容"',
+                    points_text_block=points_text_block,
+                )
         except Exception:
             logger.error(traceback.format_exc())
 
+            print(prompt)
             return
 
         try:
             # 使用小模型进行即时提取
             content, _ = await self.instant_llm_model.generate_response_async(prompt=prompt)
 
             logger.info(f"{self.log_prefix} [LLM提取] {person_name} 的 {info_type} 结果: {content}")
 
 
             if content:
                 content_json = json.loads(repair_json(content))
@@ -851,17 +873,15 @@
                 await self._save_info_to_cache(person_id, info_type, info_content if not is_unknown else "none")
 
                 if not is_unknown:
-                    logger.info(
-                        f"{self.log_prefix} [LLM提取] 成功获取并缓存 {person_name} 的 {info_type}: {info_content}"
-                    )
+                    logger.info(f"{self.log_prefix} 思考得到,{person_name} 的 {info_type}: {content}")
                 else:
-                    logger.info(f"{self.log_prefix} [LLM提取] {person_name} 的 {info_type} 信息不明确")
+                    logger.info(f"{self.log_prefix} 思考了也不知道{person_name} 的 {info_type} 信息")
             else:
                 logger.warning(
-                    f"{self.log_prefix} [LLM提取] 小模型返回空结果,获取 {person_name} 的 {info_type} 信息失败。"
+                    f"{self.log_prefix} 小模型返回空结果,获取 {person_name} 的 {info_type} 信息失败。"
                 )
         except Exception as e:
-            logger.error(f"{self.log_prefix} [LLM提取] 执行小模型请求获取用户信息时出错: {e}")
+            logger.error(f"{self.log_prefix} 执行小模型请求获取用户信息时出错: {e}")
             logger.error(traceback.format_exc())
 
     async def _save_info_to_cache(self, person_id: str, info_type: str, info_content: str):
 
@@ -1,184 +0,0 @@
-from src.chat.heart_flow.observation.chatting_observation import ChattingObservation
-from src.chat.heart_flow.observation.observation import Observation
-from src.llm_models.utils_model import LLMRequest
-from src.config.config import global_config
-import time
-import traceback
-from src.common.logger import get_logger
-from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
-from src.chat.message_receive.chat_stream import get_chat_manager
-from .base_processor import BaseProcessor
-from typing import List, Dict
-from src.chat.heart_flow.observation.hfcloop_observation import HFCloopObservation
-from src.chat.focus_chat.info.info_base import InfoBase
-from src.chat.focus_chat.info.self_info import SelfInfo
-from src.individuality.individuality import get_individuality
-
-logger = get_logger("processor")
-
-
-def init_prompt():
-    indentify_prompt = """
-{time_now},以下是正在进行的聊天内容:
-<聊天记录>
-{chat_observe_info}
-</聊天记录>
-
-{name_block}
-请你根据以上聊天记录,思考聊天记录中是否有人提到你自己相关的信息,或者有人询问你的相关信息。
-
-请选择你需要查询的关键词来回答聊天中的问题。如果需要多个关键词,请用逗号隔开。
-如果聊天中没有涉及任何关于你的问题,请输出none。
-
-现在请输出你要查询的关键词,注意只输出关键词就好,不要输出其他内容:
-"""
-    Prompt(indentify_prompt, "indentify_prompt")
-
-
-class SelfProcessor(BaseProcessor):
-    log_prefix = "自我认同"
-
-    def __init__(self, subheartflow_id: str):
-        super().__init__()
-
-        self.subheartflow_id = subheartflow_id
-
-        self.info_fetched_cache: Dict[str, Dict[str, any]] = {}
-
-        self.llm_model = LLMRequest(
-            model=global_config.model.utils_small,
-            request_type="focus.processor.self_identify",
-        )
-
-        name = get_chat_manager().get_stream_name(self.subheartflow_id)
-        self.log_prefix = f"[{name}] "
-
-    async def process_info(self, observations: List[Observation] = None, *infos) -> List[InfoBase]:
-        """处理信息对象
-
-        Args:
-            *infos: 可变数量的InfoBase类型的信息对象
-
-        Returns:
-            List[InfoBase]: 处理后的结构化信息列表
-        """
-        self_info_str = await self.self_indentify(observations)
-
-        if self_info_str:
-            self_info = SelfInfo()
-            self_info.set_self_info(self_info_str)
-        else:
-            self_info = None
-            return None
-
-        return [self_info]
-
-    async def self_indentify(
-        self,
-        observations: List[Observation] = None,
-    ):
-        """
-        在回复前进行思考,生成内心想法并收集工具调用结果
-
-        参数:
-            observations: 观察信息
-
-        返回:
-            如果return_prompt为False:
-                tuple: (current_mind, past_mind) 当前想法和过去的想法列表
-            如果return_prompt为True:
-                tuple: (current_mind, past_mind, prompt) 当前想法、过去的想法列表和使用的prompt
-        """
-
-        if observations is None:
-            observations = []
-        for observation in observations:
-            if isinstance(observation, ChattingObservation):
-                # 获取聊天元信息
-                is_group_chat = observation.is_group_chat
-                chat_target_info = observation.chat_target_info
-                chat_target_name = "对方"  # 私聊默认名称
-                if not is_group_chat and chat_target_info:
-                    # 优先使用person_name,其次user_nickname,最后回退到默认值
-                    chat_target_name = (
-                        chat_target_info.get("person_name") or chat_target_info.get("user_nickname") or chat_target_name
-                    )
-                # 获取聊天内容
-                chat_observe_info = observation.get_observe_info()
-            if isinstance(observation, HFCloopObservation):
-                pass
-
-        nickname_str = ""
-        for nicknames in global_config.bot.alias_names:
-            nickname_str += f"{nicknames},"
-        name_block = f"你的名字是{global_config.bot.nickname},你的昵称有{nickname_str},有人也会用这些昵称称呼你。"
-
-        # 获取所有可用的关键词
-        individuality = get_individuality()
-        available_keywords = individuality.get_all_keywords()
-        available_keywords_str = "、".join(available_keywords) if available_keywords else "暂无关键词"
-
-        prompt = (await global_prompt_manager.get_prompt_async("indentify_prompt")).format(
-            name_block=name_block,
-            time_now=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
-            chat_observe_info=chat_observe_info[-200:],
-            available_keywords=available_keywords_str,
-            bot_name=global_config.bot.nickname,
-        )
-
-        keyword = ""
-
-        try:
-            keyword, _ = await self.llm_model.generate_response_async(prompt=prompt)
-
-            # print(f"prompt: {prompt}\nkeyword: {keyword}")
-
-            if not keyword:
-                logger.warning(f"{self.log_prefix} LLM返回空结果,自我识别失败。")
-        except Exception as e:
-            # 处理总体异常
-            logger.error(f"{self.log_prefix} 执行LLM请求或处理响应时出错: {e}")
-            logger.error(traceback.format_exc())
-            keyword = "我是谁,我从哪来,要到哪去"
-
-        # 解析关键词
-        keyword = keyword.strip()
-        if not keyword or keyword == "none":
-            keyword_set = []
-        else:
-            # 只保留非空关键词,去除多余空格
-            keyword_set = [k.strip() for k in keyword.split(",") if k.strip()]
-
-        # 从individuality缓存中查询关键词信息
-        for keyword in keyword_set:
-            if keyword not in self.info_fetched_cache:
-                # 直接从individuality的json缓存中获取关键词信息
-                fetched_info = individuality.get_keyword_info(keyword)
-
-                if fetched_info:
-                    self.info_fetched_cache[keyword] = {
-                        "info": fetched_info,
-                        "ttl": 5,
-                    }
-                    logger.info(f"{self.log_prefix} 从个体特征缓存中获取关键词 '{keyword}' 的信息")
-
-        # 管理TTL(生存时间)
-        expired_keywords = []
-        for fetched_keyword, info in self.info_fetched_cache.items():
-            if info["ttl"] > 0:
-                info["ttl"] -= 1
-            else:
-                expired_keywords.append(fetched_keyword)
-
-        # 删除过期的关键词
-        for expired_keyword in expired_keywords:
-            del self.info_fetched_cache[expired_keyword]
-
-        fetched_info_str = ""
-        for keyword, info in self.info_fetched_cache.items():
-            fetched_info_str += f"你的:{keyword}信息是: {info['info']}\n"
-
-        return fetched_info_str
-
-
-init_prompt()
@@ -25,34 +25,10 @@ install(extra_lines=3)
 
 
 def init_prompt():
-    # Prompt(
-    # """
-    # {time_block}
-    # {indentify_block}你现在正在参与以下的聊天,以下是具体的聊天内容:
-
-    # {chat_content_block}
-
-    # {self_info_block}
-    # {relation_info_block}
-
-    # {cycle_info_block}
-
-    # {moderation_prompt}
-    # 注意,除了下面动作选项之外,你在群聊里不能做其他任何事情,这是你能力的边界,现在请你选择合适的action:
-
-    # {action_options_text}
-
-    # 请以动作的输出要求,以严格的 JSON 格式输出,且仅包含 JSON 内容。
-    # 请输出你提取的JSON,不要有任何其他文字或解释:
-
-    # """,
-    # "simple_planner_prompt",
-    # )
-
     Prompt(
         """
 {time_block}
-{indentify_block}你现在正在B站进行直播,你是一个虚拟主播,以下是直播间内容和电脑屏幕内容:
+{indentify_block}你现在正在参与以下的聊天,以下是具体的聊天内容:
 
 {chat_content_block}
 
@@ -62,7 +38,7 @@ def init_prompt():
 {cycle_info_block}
 
 {moderation_prompt}
-注意,除了下面动作选项之外,你在直播间里不能做其他任何事情,这是你能力的边界,现在请你选择合适的action:
+注意,除了下面动作选项之外,你在群聊里不能做其他任何事情,这是你能力的边界,现在请你选择合适的action:
 
 {action_options_text}
 
 