From dba967c953582a681ade0d92d4e7062714b5a856 Mon Sep 17 00:00:00 2001
From: Bakadax
Date: Sun, 27 Apr 2025 19:31:01 +0800
Subject: [PATCH] Format all chat history records
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/plugins/PFC/chat_observer.py        |  4 ++--
 src/plugins/PFC/pfc.py                  | 16 +++++++---------
 src/plugins/PFC/pfc_KnowledgeFetcher.py | 12 ++++++++----
 3 files changed, 17 insertions(+), 15 deletions(-)

diff --git a/src/plugins/PFC/chat_observer.py b/src/plugins/PFC/chat_observer.py
index 0305289fa..915618474 100644
--- a/src/plugins/PFC/chat_observer.py
+++ b/src/plugins/PFC/chat_observer.py
@@ -287,7 +287,7 @@ class ChatObserver:
 
         self._running = True
         self._task = asyncio.create_task(self._update_loop())
-        logger.info(f"ChatObserver for {self.stream_id} started")
+        logger.debug(f"ChatObserver for {self.stream_id} started")
 
     def stop(self):
         """Stop the observer"""
@@ -296,7 +296,7 @@ class ChatObserver:
         self._update_complete.set()  # Set the completion event to unblock any waiters
         if self._task:
             self._task.cancel()
-        logger.info(f"ChatObserver for {self.stream_id} stopped")
+        logger.debug(f"ChatObserver for {self.stream_id} stopped")
 
     async def process_chat_history(self, messages: list):
         """Process chat history
diff --git a/src/plugins/PFC/pfc.py b/src/plugins/PFC/pfc.py
index 5a70d02f3..a3acee47b 100644
--- a/src/plugins/PFC/pfc.py
+++ b/src/plugins/PFC/pfc.py
@@ -250,15 +250,13 @@ class GoalAnalyzer:
 
     async def analyze_conversation(self, goal, reasoning):
         messages = self.chat_observer.get_cached_messages()
-        chat_history_text = ""
-        for msg in messages:
-            time_str = datetime.datetime.fromtimestamp(msg["time"]).strftime("%H:%M:%S")
-            user_info = UserInfo.from_dict(msg.get("user_info", {}))
-            sender = user_info.user_nickname or f"用户{user_info.user_id}"
-            if sender == self.name:
-                sender = "你说"
-            chat_history_text += f"{time_str},{sender}:{msg.get('processed_plain_text', '')}\n"
-
+        chat_history_text = await build_readable_messages(
+            messages,
+            replace_bot_name=True,
+            merge_messages=False,
+            timestamp_mode="relative",
+            read_mark=0.0,
+        )
         identity_details_only = self.identity_detail_info
         identity_addon = ""
         if isinstance(identity_details_only, str):
diff --git a/src/plugins/PFC/pfc_KnowledgeFetcher.py b/src/plugins/PFC/pfc_KnowledgeFetcher.py
index 95e66c8cd..958b05bf8 100644
--- a/src/plugins/PFC/pfc_KnowledgeFetcher.py
+++ b/src/plugins/PFC/pfc_KnowledgeFetcher.py
@@ -5,6 +5,7 @@ from ..models.utils_model import LLMRequest
 from ...config.config import global_config
 from ..chat.message import Message
 from ..knowledge.knowledge_lib import qa_manager
+from ..utils.chat_message_builder import build_readable_messages
 
 logger = get_module_logger("knowledge_fetcher")
 
@@ -50,10 +51,13 @@ class KnowledgeFetcher:
             Tuple[str, str]: (fetched knowledge, knowledge source)
         """
         # Build the query context
-        chat_history_text = ""
-        for msg in chat_history:
-            # sender = msg.message_info.user_info.user_nickname or f"用户{msg.message_info.user_info.user_id}"
-            chat_history_text += f"{msg.detailed_plain_text}\n"
+        chat_history_text = await build_readable_messages(
+            chat_history,
+            replace_bot_name=True,
+            merge_messages=False,
+            timestamp_mode="relative",
+            read_mark=0.0,
+        )
 
         # Fetch related knowledge from memory
         related_memory = await HippocampusManager.get_instance().get_memory_from_text(
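
Both rewritten call sites delegate chat-history rendering to build_readable_messages with identical arguments. Below is a minimal usage sketch, assuming the helper is an async function that returns one formatted string; the absolute import path is derived from the relative import added in pfc_KnowledgeFetcher.py, and the sample message only mirrors the keys (time, user_info, processed_plain_text) that the removed pfc.py loop read.

    import asyncio

    # Assumption: "..utils.chat_message_builder" inside src/plugins/PFC/ resolves to this module.
    from src.plugins.utils.chat_message_builder import build_readable_messages


    async def render_history(messages: list) -> str:
        # Same arguments the patch passes in pfc.py and pfc_KnowledgeFetcher.py;
        # their exact semantics belong to the helper, not to this sketch.
        return await build_readable_messages(
            messages,
            replace_bot_name=True,
            merge_messages=False,
            timestamp_mode="relative",
            read_mark=0.0,
        )


    # Illustrative message shape, based on the keys the removed pfc.py loop accessed.
    sample = [
        {
            "time": 1745753461.0,
            "user_info": {"user_id": 1, "user_nickname": "Alice"},
            "processed_plain_text": "hello",
        }
    ]
    print(asyncio.run(render_history(sample)))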