From 61e0dbe372eb897903683405ff5e95f19a1cdbac Mon Sep 17 00:00:00 2001 From: SengokuCola <1026294844@qq.com> Date: Fri, 16 May 2025 23:16:47 +0800 Subject: [PATCH] =?UTF-8?q?fix=EF=BC=9A=E4=BF=AE=E5=A4=8D=E5=90=88?= =?UTF-8?q?=E5=B9=B6=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/api/config_api.py | 4 ++-- src/chat/focus_chat/expressors/default_expressor.py | 4 ++-- src/chat/focus_chat/heartflow_prompt_builder.py | 1 - .../focus_chat/info_processors/chattinginfo_processor.py | 4 ++-- src/chat/focus_chat/info_processors/self_processor.py | 4 ++-- .../focus_chat/info_processors/working_memory_processor.py | 6 +++--- src/chat/focus_chat/planners/planner.py | 4 ++-- src/chat/focus_chat/working_memory/memory_manager.py | 2 +- src/chat/heart_flow/observation/chatting_observation.py | 2 +- src/chat/memory_system/Hippocampus.py | 6 +++--- src/chat/message_receive/bot.py | 7 +++++-- src/chat/person_info/relationship_manager.py | 2 +- src/experimental/PFC/pfc.py | 2 +- 13 files changed, 25 insertions(+), 23 deletions(-) diff --git a/src/api/config_api.py b/src/api/config_api.py index 0b23fb993..b81d28da9 100644 --- a/src/api/config_api.py +++ b/src/api/config_api.py @@ -128,7 +128,7 @@ class APIBotConfig: llm_reasoning: Dict[str, Any] # 推理模型配置 llm_normal: Dict[str, Any] # 普通模型配置 llm_topic_judge: Dict[str, Any] # 主题判断模型配置 - llm_summary: Dict[str, Any] # 总结模型配置 + llm_summary: Dict[str, Any] # 总结模型配置 vlm: Dict[str, Any] # VLM模型配置 llm_heartflow: Dict[str, Any] # 心流模型配置 llm_observation: Dict[str, Any] # 观察模型配置 @@ -203,7 +203,7 @@ class APIBotConfig: "llm_reasoning", "llm_normal", "llm_topic_judge", - "llm_summary", + "llm_summary", "vlm", "llm_heartflow", "llm_observation", diff --git a/src/chat/focus_chat/expressors/default_expressor.py b/src/chat/focus_chat/expressors/default_expressor.py index ccbc1ca56..81f577b61 100644 --- a/src/chat/focus_chat/expressors/default_expressor.py +++
b/src/chat/focus_chat/expressors/default_expressor.py @@ -351,7 +351,7 @@ class DefaultExpressor: grammar_habbits=grammar_habbits_str, chat_target=chat_target_1, chat_info=chat_talking_prompt, - bot_name=global_config.BOT_NICKNAME, + bot_name=global_config.bot.nickname, prompt_personality="", reason=reason, in_mind_reply=in_mind_reply, @@ -363,7 +363,7 @@ class DefaultExpressor: template_name, sender_name=effective_sender_name, # Used in private template chat_talking_prompt=chat_talking_prompt, - bot_name=global_config.BOT_NICKNAME, + bot_name=global_config.bot.nickname, prompt_personality=prompt_personality, reason=reason, moderation_prompt=await global_prompt_manager.get_prompt_async("moderation_prompt"), diff --git a/src/chat/focus_chat/heartflow_prompt_builder.py b/src/chat/focus_chat/heartflow_prompt_builder.py index af526eb88..d8d2b836f 100644 --- a/src/chat/focus_chat/heartflow_prompt_builder.py +++ b/src/chat/focus_chat/heartflow_prompt_builder.py @@ -7,7 +7,6 @@ from src.chat.person_info.relationship_manager import relationship_manager from src.chat.utils.utils import get_embedding import time from typing import Union, Optional -from src.common.database import db from src.chat.utils.utils import get_recent_group_speaker from src.manager.mood_manager import mood_manager from src.chat.memory_system.Hippocampus import HippocampusManager diff --git a/src/chat/focus_chat/info_processors/chattinginfo_processor.py b/src/chat/focus_chat/info_processors/chattinginfo_processor.py index 8d1eb9793..c9641b9b7 100644 --- a/src/chat/focus_chat/info_processors/chattinginfo_processor.py +++ b/src/chat/focus_chat/info_processors/chattinginfo_processor.py @@ -27,7 +27,7 @@ class ChattingInfoProcessor(BaseProcessor): """初始化观察处理器""" super().__init__() # TODO: API-Adapter修改标记 - self.llm_summary = LLMRequest( + self.model_summary = LLMRequest( model=global_config.model.observation, temperature=0.7, max_tokens=300, request_type="chat_observation" ) @@ -94,7 +94,7 @@ class 
ChattingInfoProcessor(BaseProcessor): async def chat_compress(self, obs: ChattingObservation): if obs.compressor_prompt: try: - summary_result, _, _ = await self.llm_summary.generate_response(obs.compressor_prompt) + summary_result, _, _ = await self.model_summary.generate_response(obs.compressor_prompt) summary = "没有主题的闲聊" # 默认值 if summary_result: # 确保结果不为空 summary = summary_result diff --git a/src/chat/focus_chat/info_processors/self_processor.py b/src/chat/focus_chat/info_processors/self_processor.py index 19876c93c..5114e49b6 100644 --- a/src/chat/focus_chat/info_processors/self_processor.py +++ b/src/chat/focus_chat/info_processors/self_processor.py @@ -49,8 +49,8 @@ class SelfProcessor(BaseProcessor): self.subheartflow_id = subheartflow_id self.llm_model = LLMRequest( - model=global_config.llm_sub_heartflow, - temperature=global_config.llm_sub_heartflow["temp"], + model=global_config.model.sub_heartflow, + temperature=global_config.model.sub_heartflow["temp"], max_tokens=800, request_type="self_identify", ) diff --git a/src/chat/focus_chat/info_processors/working_memory_processor.py b/src/chat/focus_chat/info_processors/working_memory_processor.py index c682da699..c79c8363d 100644 --- a/src/chat/focus_chat/info_processors/working_memory_processor.py +++ b/src/chat/focus_chat/info_processors/working_memory_processor.py @@ -61,8 +61,8 @@ class WorkingMemoryProcessor(BaseProcessor): self.subheartflow_id = subheartflow_id self.llm_model = LLMRequest( - model=global_config.llm_sub_heartflow, - temperature=global_config.llm_sub_heartflow["temp"], + model=global_config.model.sub_heartflow, + temperature=global_config.model.sub_heartflow["temp"], max_tokens=800, request_type="working_memory", ) @@ -118,7 +118,7 @@ class WorkingMemoryProcessor(BaseProcessor): # 使用提示模板进行处理 prompt = (await global_prompt_manager.get_prompt_async("prompt_memory_proces")).format( - bot_name=global_config.BOT_NICKNAME, + bot_name=global_config.bot.nickname, time_now=time.strftime("%Y-%m-%d 
%H:%M:%S", time.localtime()), chat_observe_info=chat_info, memory_str=memory_choose_str, diff --git a/src/chat/focus_chat/planners/planner.py b/src/chat/focus_chat/planners/planner.py index 21ca157f9..116419ee1 100644 --- a/src/chat/focus_chat/planners/planner.py +++ b/src/chat/focus_chat/planners/planner.py @@ -69,7 +69,7 @@ class ActionPlanner: self.log_prefix = log_prefix # LLM规划器配置 self.planner_llm = LLMRequest( - model=global_config.llm_plan, + model=global_config.model.plan, max_tokens=1000, request_type="action_planning", # 用于动作规划 ) @@ -273,7 +273,7 @@ class ActionPlanner: planner_prompt_template = await global_prompt_manager.get_prompt_async("planner_prompt") prompt = planner_prompt_template.format( - bot_name=global_config.BOT_NICKNAME, + bot_name=global_config.bot.nickname, prompt_personality=personality_block, chat_context_description=chat_context_description, chat_content_block=chat_content_block, diff --git a/src/chat/focus_chat/working_memory/memory_manager.py b/src/chat/focus_chat/working_memory/memory_manager.py index 7154fe48c..7fda40239 100644 --- a/src/chat/focus_chat/working_memory/memory_manager.py +++ b/src/chat/focus_chat/working_memory/memory_manager.py @@ -33,7 +33,7 @@ class MemoryManager: self._id_map: Dict[str, MemoryItem] = {} self.llm_summarizer = LLMRequest( - model=global_config.llm_summary, temperature=0.3, max_tokens=512, request_type="memory_summarization" + model=global_config.model.summary, temperature=0.3, max_tokens=512, request_type="memory_summarization" ) @property diff --git a/src/chat/heart_flow/observation/chatting_observation.py b/src/chat/heart_flow/observation/chatting_observation.py index 9ea18b471..7e4872014 100644 --- a/src/chat/heart_flow/observation/chatting_observation.py +++ b/src/chat/heart_flow/observation/chatting_observation.py @@ -67,7 +67,7 @@ class ChattingObservation(Observation): self.oldest_messages_str = "" self.compressor_prompt = "" # TODO: API-Adapter修改标记 - self.llm_summary = LLMRequest( + 
self.model_summary = LLMRequest( model=global_config.model.observation, temperature=0.7, max_tokens=300, request_type="chat_observation" ) diff --git a/src/chat/memory_system/Hippocampus.py b/src/chat/memory_system/Hippocampus.py index 2de769205..aae1721c2 100644 --- a/src/chat/memory_system/Hippocampus.py +++ b/src/chat/memory_system/Hippocampus.py @@ -193,7 +193,7 @@ class Hippocampus: def __init__(self): self.memory_graph = MemoryGraph() self.llm_topic_judge = None - self.llm_summary = None + self.model_summary = None self.entorhinal_cortex = None self.parahippocampal_gyrus = None @@ -205,7 +205,7 @@ class Hippocampus: self.entorhinal_cortex.sync_memory_from_db() # TODO: API-Adapter修改标记 self.llm_topic_judge = LLMRequest(global_config.model.topic_judge, request_type="memory") - self.llm_summary = LLMRequest(global_config.model.summary, request_type="memory") + self.model_summary = LLMRequest(global_config.model.summary, request_type="memory") def get_all_node_names(self) -> list: """获取记忆图中所有节点的名字列表""" @@ -1167,7 +1167,7 @@ class ParahippocampalGyrus: # 调用修改后的 topic_what,不再需要 time_info topic_what_prompt = self.hippocampus.topic_what(input_text, topic) try: - task = self.hippocampus.llm_summary.generate_response_async(topic_what_prompt) + task = self.hippocampus.model_summary.generate_response_async(topic_what_prompt) tasks.append((topic.strip(), task)) except Exception as e: logger.error(f"生成话题 '{topic}' 的摘要时发生错误: {e}") diff --git a/src/chat/message_receive/bot.py b/src/chat/message_receive/bot.py index 0e35f6f6e..cea791de4 100644 --- a/src/chat/message_receive/bot.py +++ b/src/chat/message_receive/bot.py @@ -72,6 +72,7 @@ class ChatBot: message_data["message_info"]["user_info"]["user_id"] = str( message_data["message_info"]["user_info"]["user_id"] ) + # print(message_data) logger.trace(f"处理消息:{str(message_data)[:120]}...") message = MessageRecv(message_data) groupinfo = message.message_info.group_info @@ -86,12 +87,14 @@ class ChatBot: logger.trace("检测到私聊消息,检查") 
# 好友黑名单拦截 if userinfo.user_id not in global_config.experimental.talk_allowed_private: - logger.debug(f"用户{userinfo.user_id}没有私聊权限") + # logger.debug(f"用户{userinfo.user_id}没有私聊权限") return # 群聊黑名单拦截 + # print(groupinfo.group_id) + # print(global_config.chat_target.talk_allowed_groups) if groupinfo is not None and groupinfo.group_id not in global_config.chat_target.talk_allowed_groups: - logger.trace(f"群{groupinfo.group_id}被禁止回复") + logger.debug(f"群{groupinfo.group_id}被禁止回复") return # 确认从接口发来的message是否有自定义的prompt模板信息 diff --git a/src/chat/person_info/relationship_manager.py b/src/chat/person_info/relationship_manager.py index c8a443857..a23780c0e 100644 --- a/src/chat/person_info/relationship_manager.py +++ b/src/chat/person_info/relationship_manager.py @@ -77,7 +77,7 @@ class RelationshipManager: @staticmethod async def is_known_some_one(platform, user_id): """判断是否认识某人""" - is_known = person_info_manager.is_person_known(platform, user_id) + is_known = await person_info_manager.is_person_known(platform, user_id) return is_known @staticmethod diff --git a/src/experimental/PFC/pfc.py b/src/experimental/PFC/pfc.py index 686d4af49..80e75c5bf 100644 --- a/src/experimental/PFC/pfc.py +++ b/src/experimental/PFC/pfc.py @@ -316,7 +316,7 @@ class GoalAnalyzer: # message_segment = Seg(type="text", data=content) # bot_user_info = UserInfo( # user_id=global_config.BOT_QQ, -# user_nickname=global_config.BOT_NICKNAME, +# user_nickname=global_config.bot.nickname, # platform=chat_stream.platform, # )