feat: add config options to disable the memory and relationship systems

Author: SengokuCola
Date: 2025-06-09 22:33:04 +08:00
parent c09b1426a6
commit 968b82cd5b
11 changed files with 112 additions and 76 deletions
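
The config-side diff is not shown on this page. As a rough sketch only, the two switches used by the code below (memory.enable_memory and relationship.enable_relationship) could be modeled like this; the class names and defaults here are assumptions, not the repository's actual config schema:

from dataclasses import dataclass, field

# Hypothetical stand-in for the real config model; only the flag names come from the diffs below.
@dataclass
class MemoryConfig:
    enable_memory: bool = True        # False skips memory activation and memory prompts

@dataclass
class RelationshipConfig:
    enable_relationship: bool = True  # False skips relationship prompts and processing
    give_name: bool = True            # pre-existing flag, still honored when relationships are on

@dataclass
class GlobalConfig:
    memory: MemoryConfig = field(default_factory=MemoryConfig)
    relationship: RelationshipConfig = field(default_factory=RelationshipConfig)

global_config = GlobalConfig()        # stand-in for src.config.config.global_config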

View File

@@ -51,7 +51,7 @@ PROCESSOR_CLASSES = {
     "ToolProcessor": (ToolProcessor, "tool_use_processor"),
     "WorkingMemoryProcessor": (WorkingMemoryProcessor, "working_memory_processor"),
     "SelfProcessor": (SelfProcessor, "self_identify_processor"),
-    "RelationshipProcessor": (RelationshipProcessor, "relationship_processor"),
+    "RelationshipProcessor": (RelationshipProcessor, "relation_processor"),
 }

 logger = get_logger("hfc")  # Logger Name Changed
@@ -108,10 +108,18 @@ class HeartFChatting:
         # Determine which processors to enable from the config file and default rules
         config_processor_settings = global_config.focus_chat_processor
-        self.enabled_processor_names = [
-            proc_name for proc_name, (_proc_class, config_key) in PROCESSOR_CLASSES.items()
-            if not config_key or getattr(config_processor_settings, config_key, True)
-        ]
+        self.enabled_processor_names = []
+        for proc_name, (_proc_class, config_key) in PROCESSOR_CLASSES.items():
+            # For the relationship processor, both config switches must be on
+            if proc_name == "RelationshipProcessor":
+                if (global_config.relationship.enable_relationship and
+                        getattr(config_processor_settings, config_key, True)):
+                    self.enabled_processor_names.append(proc_name)
+            else:
+                # Original logic for the other processors
+                if not config_key or getattr(config_processor_settings, config_key, True):
+                    self.enabled_processor_names.append(proc_name)

         # logger.info(f"{self.log_prefix} 将启用的处理器: {self.enabled_processor_names}")
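
The per-processor special case can also be read as a single predicate; the helper below is only a sketch of the rule the loop above implements, not code from this commit:

def is_processor_enabled(proc_name, config_key, processor_settings, global_config) -> bool:
    # A processor is enabled when its focus_chat_processor switch is on (or it has no switch);
    # RelationshipProcessor additionally requires relationship.enable_relationship.
    if config_key and not getattr(processor_settings, config_key, True):
        return False
    if proc_name == "RelationshipProcessor" and not global_config.relationship.enable_relationship:
        return False
    return True

# equivalent to the loop above:
# enabled = [name for name, (_cls, key) in PROCESSOR_CLASSES.items()
#            if is_processor_enabled(name, key, config_processor_settings, global_config)]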

View File

@@ -1,4 +1,4 @@
-from src.chat.memory_system.Hippocampus import HippocampusManager
+from src.chat.memory_system.Hippocampus import hippocampus_manager
 from src.config.config import global_config
 from src.chat.message_receive.message import MessageRecv
 from src.chat.message_receive.storage import MessageStorage
@@ -67,21 +67,22 @@ async def _calculate_interest(message: MessageRecv) -> Tuple[float, bool]:
     is_mentioned, _ = is_mentioned_bot_in_message(message)
     interested_rate = 0.0
-    with Timer("记忆激活"):
-        interested_rate = await HippocampusManager.get_instance().get_activate_from_text(
-            message.processed_plain_text,
-            fast_retrieval=True,
-        )
-    text_len = len(message.processed_plain_text)
-    # Adjust interest by text length: longer text means higher interest, with a diminishing growth rate; min 0.01, max 0.05
-    # Use a logarithmic function for the diminishing growth
+    if global_config.memory.enable_memory:
+        with Timer("记忆激活"):
+            interested_rate = await hippocampus_manager.get_activate_from_text(
+                message.processed_plain_text,
+                fast_retrieval=True,
+            )
+        logger.trace(f"记忆激活率: {interested_rate:.2f}")
+    text_len = len(message.processed_plain_text)
+    # Adjust interest by text length: longer text means higher interest, with a diminishing growth rate; min 0.01, max 0.05
+    # Use a logarithmic function for the diminishing growth
-    base_interest = 0.01 + (0.05 - 0.01) * (math.log10(text_len + 1) / math.log10(1000 + 1))
-    base_interest = min(max(base_interest, 0.01), 0.05)
+    base_interest = 0.01 + (0.05 - 0.01) * (math.log10(text_len + 1) / math.log10(1000 + 1))
+    base_interest = min(max(base_interest, 0.01), 0.05)
-    interested_rate += base_interest
-    logger.trace(f"记忆激活率: {interested_rate:.2f}")
+    interested_rate += base_interest

     if is_mentioned:
         interest_increase_on_mention = 1
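
With memory disabled, interested_rate starts from 0.0 and only the length-based floor above (plus the mention bonus) contributes. Restated as a standalone helper for clarity (a sketch, not code from this commit):

import math

def base_interest_from_length(text_len: int) -> float:
    # Same formula as above: grows logarithmically from 0.01 toward 0.05,
    # saturating at about 1000 characters.
    value = 0.01 + (0.05 - 0.01) * (math.log10(text_len + 1) / math.log10(1000 + 1))
    return min(max(value, 0.01), 0.05)

# e.g. base_interest_from_length(10) ≈ 0.024, base_interest_from_length(100) ≈ 0.037, base_interest_from_length(1000) = 0.05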
@@ -210,7 +211,7 @@ class HeartFCMessageReceiver:
             logger.info(f"[{mes_name}]{userinfo.user_nickname}:{message.processed_plain_text}")

             # 8. Relationship handling
-            if global_config.relationship.give_name:
+            if global_config.relationship.enable_relationship and global_config.relationship.give_name:
                 await _process_relationship(message)

         except Exception as e:

View File

@@ -193,8 +193,9 @@ class MindProcessor(BaseProcessor):
         # Get personalized info
         relation_prompt = ""
-        for person in person_list:
-            relation_prompt += await relationship_manager.build_relationship_info(person, is_id=True)
+        if global_config.relationship.enable_relationship:
+            for person in person_list:
+                relation_prompt += await relationship_manager.build_relationship_info(person, is_id=True)

         template_name = "sub_heartflow_prompt_before" if is_group_chat else "sub_heartflow_prompt_private_before"
         logger.debug(f"{self.log_prefix} 使用{'群聊' if is_group_chat else '私聊'}思考模板")

View File

@@ -6,7 +6,7 @@ from src.config.config import global_config
 from src.common.logger_manager import get_logger
 from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
 from datetime import datetime
-from src.chat.memory_system.Hippocampus import HippocampusManager
+from src.chat.memory_system.Hippocampus import hippocampus_manager
 from typing import List, Dict
 import difflib
 import json
@@ -87,6 +87,10 @@ class MemoryActivator:
         Returns:
             List[Dict]: 激活的记忆列表
         """
+        # If the memory system is disabled, return an empty list right away
+        if not global_config.memory.enable_memory:
+            return []
+
         obs_info_text = ""
         for observation in observations:
             if isinstance(observation, ChattingObservation):
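
Because the guard returns the same type as the normal path (an empty list rather than None), callers can keep iterating the result unchanged. A tiny runnable illustration of the pattern, with a simplified signature that is not the real method:

import asyncio
from typing import Dict, List

async def activate_memory(enable_memory: bool) -> List[Dict]:
    # Disabled memory short-circuits to an empty list, so prompt-building code
    # downstream simply sees "no related memories" instead of a special case.
    if not enable_memory:
        return []
    # the real method would collect keywords and query hippocampus_manager here
    return [{"topic": "example", "content": "..."}]

print(asyncio.run(activate_memory(enable_memory=False)))  # -> []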
@@ -128,10 +132,10 @@ class MemoryActivator:
         logger.debug(f"当前激活的记忆关键词: {self.cached_keywords}")

         # Query the memory system for related memories
-        related_memory = await HippocampusManager.get_instance().get_memory_from_topic(
+        related_memory = await hippocampus_manager.get_memory_from_topic(
             valid_keywords=keywords, max_memory_num=3, max_memory_length=2, max_depth=3
         )

-        # related_memory = await HippocampusManager.get_instance().get_memory_from_text(
+        # related_memory = await hippocampus_manager.get_memory_from_text(
         #     text=obs_info_text, max_memory_num=5, max_memory_length=2, max_depth=3, fast_retrieval=False
         # )

View File

@@ -193,7 +193,7 @@ class ActionPlanner(BasePlanner):
         prompt = f"{prompt}"
         llm_content, (reasoning_content, _) = await self.planner_llm.generate_response_async(prompt=prompt)

-        # logger.info(f"{self.log_prefix}规划器原始提示词: {prompt}")
+        logger.info(f"{self.log_prefix}规划器原始提示词: {prompt}")
         logger.info(f"{self.log_prefix}规划器原始响应: {llm_content}")
         logger.info(f"{self.log_prefix}规划器推理: {reasoning_content}")

View File

@@ -1655,21 +1655,9 @@ class ParahippocampalGyrus:
 class HippocampusManager:
-    _instance = None
-    _hippocampus = None
-    _initialized = False
-
-    @classmethod
-    def get_instance(cls):
-        if cls._instance is None:
-            cls._instance = cls()
-        return cls._instance
-
-    @classmethod
-    def get_hippocampus(cls):
-        if not cls._initialized:
-            raise RuntimeError("HippocampusManager 尚未初始化,请先调用 initialize 方法")
-        return cls._hippocampus
+    def __init__(self):
+        self._hippocampus = None
+        self._initialized = False

     def initialize(self):
         """初始化海马体实例"""
@@ -1695,6 +1683,11 @@ class HippocampusManager:
         return self._hippocampus

+    def get_hippocampus(self):
+        if not self._initialized:
+            raise RuntimeError("HippocampusManager 尚未初始化,请先调用 initialize 方法")
+        return self._hippocampus
+
     async def build_memory(self):
         """构建记忆的公共接口"""
         if not self._initialized:
@@ -1772,3 +1765,7 @@ class HippocampusManager:
         if not self._initialized:
             raise RuntimeError("HippocampusManager 尚未初始化,请先调用 initialize 方法")
         return self._hippocampus.get_all_node_names()
+
+
+# Create the global instance
+hippocampus_manager = HippocampusManager()
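
With the classmethod singleton gone, call sites change from HippocampusManager.get_instance().xxx() to the imported hippocampus_manager.xxx(). A minimal sketch of the module-global pattern (shortened names, not the real class):

class Manager:
    def __init__(self):
        self._impl = None
        self._initialized = False

    def initialize(self, impl=None):
        # the real initialize() constructs the Hippocampus; here we only mark readiness
        self._impl = impl if impl is not None else object()
        self._initialized = True

    def get_impl(self):
        if not self._initialized:
            raise RuntimeError("Manager not initialized; call initialize() first")
        return self._impl

# one shared instance created at import time; importers use it directly
manager = Manager()

Compared with get_instance(), importing the instance keeps call sites shorter and makes the single construction point explicit (module import), at the cost of creating the object as soon as the module is imported.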

View File

@@ -7,7 +7,7 @@ from src.person_info.relationship_manager import relationship_manager
 import time
 from src.chat.utils.utils import get_recent_group_speaker
 from src.manager.mood_manager import mood_manager
-from src.chat.memory_system.Hippocampus import HippocampusManager
+from src.chat.memory_system.Hippocampus import hippocampus_manager
 from src.chat.knowledge.knowledge_lib import qa_manager
 from src.chat.focus_chat.expressors.exprssion_learner import expression_learner
 import random
@@ -112,8 +112,9 @@ class PromptBuilder:
         )

         relation_prompt = ""
-        for person in who_chat_in_group:
-            relation_prompt += await relationship_manager.build_relationship_info(person)
+        if global_config.relationship.enable_relationship:
+            for person in who_chat_in_group:
+                relation_prompt += await relationship_manager.build_relationship_info(person)

         mood_prompt = mood_manager.get_mood_prompt()
@@ -159,18 +160,19 @@ class PromptBuilder:
         )[0]

         memory_prompt = ""
-        related_memory = await HippocampusManager.get_instance().get_memory_from_text(
-            text=message_txt, max_memory_num=2, max_memory_length=2, max_depth=3, fast_retrieval=False
-        )
-
-        related_memory_info = ""
-        if related_memory:
-            for memory in related_memory:
-                related_memory_info += memory[1]
-            memory_prompt = await global_prompt_manager.format_prompt(
-                "memory_prompt", related_memory_info=related_memory_info
-            )
+        if global_config.memory.enable_memory:
+            related_memory = await hippocampus_manager.get_memory_from_text(
+                text=message_txt, max_memory_num=2, max_memory_length=2, max_depth=3, fast_retrieval=False
+            )
+
+            related_memory_info = ""
+            if related_memory:
+                for memory in related_memory:
+                    related_memory_info += memory[1]
+                memory_prompt = await global_prompt_manager.format_prompt(
+                    "memory_prompt", related_memory_info=related_memory_info
+                )

         message_list_before_now = get_raw_msg_before_timestamp_with_chat(
             chat_id=chat_stream.stream_id,
             timestamp=time.time(),
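
Net effect of the two switches on prompt assembly: each disabled feature leaves its fragment empty instead of branching into a different template. A toy sketch of that behavior (hypothetical fragment names, not the real prompt templates):

def build_prompt(enable_memory: bool, enable_relationship: bool) -> str:
    # Mirrors the gating above: disabled features contribute empty strings,
    # so the surrounding prompt is assembled the same way either way.
    memory_prompt = "[related memories...]\n" if enable_memory else ""
    relation_prompt = "[relationship info...]\n" if enable_relationship else ""
    return f"{relation_prompt}{memory_prompt}[chat history...]"

print(build_prompt(enable_memory=False, enable_relationship=False))  # -> "[chat history...]"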