From 869a02d232221d26bb16f54396d580e649b01d44 Mon Sep 17 00:00:00 2001 From: "CNMr.Sunshine" <61444298+CNMrSunshine@users.noreply.github.com> Date: Sun, 6 Jul 2025 11:10:14 +0800 Subject: [PATCH 1/8] Update planner_simple.py --- src/chat/focus_chat/planners/planner_simple.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/chat/focus_chat/planners/planner_simple.py b/src/chat/focus_chat/planners/planner_simple.py index 20f41c711..05c57dcf6 100644 --- a/src/chat/focus_chat/planners/planner_simple.py +++ b/src/chat/focus_chat/planners/planner_simple.py @@ -29,6 +29,11 @@ def init_prompt(): {chat_context_description},以下是具体的聊天内容: {chat_content_block} {moderation_prompt} + +重要提醒:避免重复回复同一话题 +- 如果你在最近1分钟内已经对某个话题进行了回复,不要再次回复相同或相似的内容 +- 如果聊天记录显示你刚刚已经回复过相似内容,即使话题仍然在进行,也应该选择no_reply + 现在请你根据聊天内容选择合适的action: {action_options_text} From 54516bc8731227852db9acd075419798ee2ea3fe Mon Sep 17 00:00:00 2001 From: "CNMr.Sunshine" <61444298+CNMrSunshine@users.noreply.github.com> Date: Sun, 6 Jul 2025 11:19:13 +0800 Subject: [PATCH 2/8] Update planner_simple.py --- src/chat/focus_chat/planners/planner_simple.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/chat/focus_chat/planners/planner_simple.py b/src/chat/focus_chat/planners/planner_simple.py index 05c57dcf6..0b7aa96e7 100644 --- a/src/chat/focus_chat/planners/planner_simple.py +++ b/src/chat/focus_chat/planners/planner_simple.py @@ -30,9 +30,7 @@ def init_prompt(): {chat_content_block} {moderation_prompt} -重要提醒:避免重复回复同一话题 -- 如果你在最近1分钟内已经对某个话题进行了回复,不要再次回复相同或相似的内容 -- 如果聊天记录显示你刚刚已经回复过相似内容,即使话题仍然在进行,也应该选择no_reply +重要提醒:如果聊天记录显示你刚刚已经回复过相似内容,即使话题仍然在进行,必须选择no_reply 现在请你根据聊天内容选择合适的action: From 2a0dfb7642209723628e0d205e59847f06a3f3d0 Mon Sep 17 00:00:00 2001 From: "CNMr.Sunshine" <61444298+CNMrSunshine@users.noreply.github.com> Date: Sun, 6 Jul 2025 11:25:15 +0800 Subject: [PATCH 3/8] Update working_memory_processor.py --- .../working_memory_processor.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/src/chat/focus_chat/info_processors/working_memory_processor.py b/src/chat/focus_chat/info_processors/working_memory_processor.py index 2de0bcfab..f81833c00 100644 --- a/src/chat/focus_chat/info_processors/working_memory_processor.py +++ b/src/chat/focus_chat/info_processors/working_memory_processor.py @@ -71,6 +71,7 @@ class WorkingMemoryProcessor(BaseProcessor): """ working_memory = None chat_info = "" + chat_obs = None try: for observation in observations: if isinstance(observation, WorkingMemoryObservation): @@ -79,10 +80,15 @@ class WorkingMemoryProcessor(BaseProcessor): chat_info = observation.get_observe_info() chat_obs = observation # 检查是否有待压缩内容 - if chat_obs.compressor_prompt: + if chat_obs and chat_obs.compressor_prompt: logger.debug(f"{self.log_prefix} 压缩聊天记忆") await self.compress_chat_memory(working_memory, chat_obs) + # 检查working_memory是否为None + if working_memory is None: + logger.debug(f"{self.log_prefix} 没有找到工作记忆观察,跳过处理") + return [] + all_memory = working_memory.get_all_memories() if not all_memory: logger.debug(f"{self.log_prefix} 目前没有工作记忆,跳过提取") @@ -183,6 +189,11 @@ class WorkingMemoryProcessor(BaseProcessor): working_memory: 工作记忆对象 obs: 聊天观察对象 """ + # 检查working_memory是否为None + if working_memory is None: + logger.warning(f"{self.log_prefix} 工作记忆对象为None,无法压缩聊天记忆") + return + try: summary_result, _ = await self.llm_model.generate_response_async(obs.compressor_prompt) if not summary_result: @@ -235,6 +246,11 @@ class WorkingMemoryProcessor(BaseProcessor): memory_id1: 第一个记忆ID 
memory_id2: 第二个记忆ID """ + # 检查working_memory是否为None + if working_memory is None: + logger.warning(f"{self.log_prefix} 工作记忆对象为None,无法合并记忆") + return + try: merged_memory = await working_memory.merge_memory(memory_id1, memory_id2) logger.debug(f"{self.log_prefix} 合并后的记忆梗概: {merged_memory.brief}") From c0de1fcc3f130f4e428c1da54288530100141cf8 Mon Sep 17 00:00:00 2001 From: "CNMr.Sunshine" <61444298+CNMrSunshine@users.noreply.github.com> Date: Sun, 6 Jul 2025 11:25:37 +0800 Subject: [PATCH 4/8] Update knowledge_lib.py --- src/chat/knowledge/knowledge_lib.py | 108 +++++++++++++++------------- 1 file changed, 58 insertions(+), 50 deletions(-) diff --git a/src/chat/knowledge/knowledge_lib.py b/src/chat/knowledge/knowledge_lib.py index 6a4fcd4ea..a9d603b9f 100644 --- a/src/chat/knowledge/knowledge_lib.py +++ b/src/chat/knowledge/knowledge_lib.py @@ -5,60 +5,68 @@ from src.chat.knowledge.mem_active_manager import MemoryActiveManager from src.chat.knowledge.qa_manager import QAManager from src.chat.knowledge.kg_manager import KGManager from src.chat.knowledge.global_logger import logger +from src.config.config import global_config as bot_global_config # try: # import quick_algo # except ImportError: # print("quick_algo not found, please install it first") -logger.info("正在初始化Mai-LPMM\n") -logger.info("创建LLM客户端") -llm_client_list = dict() -for key in global_config["llm_providers"]: - llm_client_list[key] = LLMClient( - global_config["llm_providers"][key]["base_url"], - global_config["llm_providers"][key]["api_key"], +# 检查LPMM知识库是否启用 +if bot_global_config.lpmm_knowledge.enable: + logger.info("正在初始化Mai-LPMM\n") + logger.info("创建LLM客户端") + llm_client_list = dict() + for key in global_config["llm_providers"]: + llm_client_list[key] = LLMClient( + global_config["llm_providers"][key]["base_url"], + global_config["llm_providers"][key]["api_key"], + ) + + # 初始化Embedding库 + embed_manager = EmbeddingManager(llm_client_list[global_config["embedding"]["provider"]]) + logger.info("正在从文件加载Embedding库") + try: + embed_manager.load_from_file() + except Exception as e: + logger.warning("此消息不会影响正常使用:从文件加载Embedding库时,{}".format(e)) + # logger.warning("如果你是第一次导入知识,或者还未导入知识,请忽略此错误") + logger.info("Embedding库加载完成") + # 初始化KG + kg_manager = KGManager() + logger.info("正在从文件加载KG") + try: + kg_manager.load_from_file() + except Exception as e: + logger.warning("此消息不会影响正常使用:从文件加载KG时,{}".format(e)) + # logger.warning("如果你是第一次导入知识,或者还未导入知识,请忽略此错误") + logger.info("KG加载完成") + + logger.info(f"KG节点数量:{len(kg_manager.graph.get_node_list())}") + logger.info(f"KG边数量:{len(kg_manager.graph.get_edge_list())}") + + + # 数据比对:Embedding库与KG的段落hash集合 + for pg_hash in kg_manager.stored_paragraph_hashes: + key = PG_NAMESPACE + "-" + pg_hash + if key not in embed_manager.stored_pg_hashes: + logger.warning(f"KG中存在Embedding库中不存在的段落:{key}") + + # 问答系统(用于知识库) + qa_manager = QAManager( + embed_manager, + kg_manager, + llm_client_list[global_config["embedding"]["provider"]], + llm_client_list[global_config["qa"]["llm"]["provider"]], + llm_client_list[global_config["qa"]["llm"]["provider"]], ) -# 初始化Embedding库 -embed_manager = EmbeddingManager(llm_client_list[global_config["embedding"]["provider"]]) -logger.info("正在从文件加载Embedding库") -try: - embed_manager.load_from_file() -except Exception as e: - logger.warning("此消息不会影响正常使用:从文件加载Embedding库时,{}".format(e)) - # logger.warning("如果你是第一次导入知识,或者还未导入知识,请忽略此错误") -logger.info("Embedding库加载完成") -# 初始化KG -kg_manager = KGManager() -logger.info("正在从文件加载KG") -try: - kg_manager.load_from_file() -except Exception as e: - 
logger.warning("此消息不会影响正常使用:从文件加载KG时,{}".format(e)) - # logger.warning("如果你是第一次导入知识,或者还未导入知识,请忽略此错误") -logger.info("KG加载完成") - -logger.info(f"KG节点数量:{len(kg_manager.graph.get_node_list())}") -logger.info(f"KG边数量:{len(kg_manager.graph.get_edge_list())}") - - -# 数据比对:Embedding库与KG的段落hash集合 -for pg_hash in kg_manager.stored_paragraph_hashes: - key = PG_NAMESPACE + "-" + pg_hash - if key not in embed_manager.stored_pg_hashes: - logger.warning(f"KG中存在Embedding库中不存在的段落:{key}") - -# 问答系统(用于知识库) -qa_manager = QAManager( - embed_manager, - kg_manager, - llm_client_list[global_config["embedding"]["provider"]], - llm_client_list[global_config["qa"]["llm"]["provider"]], - llm_client_list[global_config["qa"]["llm"]["provider"]], -) - -# 记忆激活(用于记忆库) -inspire_manager = MemoryActiveManager( - embed_manager, - llm_client_list[global_config["embedding"]["provider"]], -) + # 记忆激活(用于记忆库) + inspire_manager = MemoryActiveManager( + embed_manager, + llm_client_list[global_config["embedding"]["provider"]], + ) +else: + logger.info("LPMM知识库已禁用,跳过初始化") + # 创建空的占位符对象,避免导入错误 + qa_manager = None + inspire_manager = None From 9c0271b10f8f83277c662d36f4b805e3ee4f0141 Mon Sep 17 00:00:00 2001 From: "CNMr.Sunshine" <61444298+CNMrSunshine@users.noreply.github.com> Date: Sun, 6 Jul 2025 11:25:51 +0800 Subject: [PATCH 5/8] Update default_generator.py --- src/chat/replyer/default_generator.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/chat/replyer/default_generator.py b/src/chat/replyer/default_generator.py index da9d9a584..1f0b438dc 100644 --- a/src/chat/replyer/default_generator.py +++ b/src/chat/replyer/default_generator.py @@ -956,6 +956,11 @@ async def get_prompt_info(message: str, threshold: float): logger.debug(f"获取知识库内容,元消息:{message[:30]}...,消息长度: {len(message)}") # 从LPMM知识库获取知识 try: + # 检查LPMM知识库是否启用 + if qa_manager is None: + logger.debug("LPMM知识库已禁用,跳过知识获取") + return "" + found_knowledge_from_lpmm = qa_manager.get_knowledge(message) end_time = time.time() From cb5fa45523ceb79e142e31f8dee6160c4e6e0bac Mon Sep 17 00:00:00 2001 From: "CNMr.Sunshine" <61444298+CNMrSunshine@users.noreply.github.com> Date: Sun, 6 Jul 2025 11:26:07 +0800 Subject: [PATCH 6/8] Update pfc_KnowledgeFetcher.py --- src/experimental/PFC/pfc_KnowledgeFetcher.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/experimental/PFC/pfc_KnowledgeFetcher.py b/src/experimental/PFC/pfc_KnowledgeFetcher.py index 38a6dafb9..c52533ea6 100644 --- a/src/experimental/PFC/pfc_KnowledgeFetcher.py +++ b/src/experimental/PFC/pfc_KnowledgeFetcher.py @@ -35,6 +35,11 @@ class KnowledgeFetcher: logger.debug(f"[私聊][{self.private_name}]正在从LPMM知识库中获取知识") try: + # 检查LPMM知识库是否启用 + if qa_manager is None: + logger.debug(f"[私聊][{self.private_name}]LPMM知识库已禁用,跳过知识获取") + return "未找到匹配的知识" + knowledge_info = qa_manager.get_knowledge(query) logger.debug(f"[私聊][{self.private_name}]LPMM知识库查询结果: {knowledge_info:150}") return knowledge_info From 1da67ae067831714c6ff25b8a3ff70ea4d6c6cc1 Mon Sep 17 00:00:00 2001 From: "CNMr.Sunshine" <61444298+CNMrSunshine@users.noreply.github.com> Date: Sun, 6 Jul 2025 11:26:22 +0800 Subject: [PATCH 7/8] Update lpmm_get_knowledge.py --- src/tools/not_using/lpmm_get_knowledge.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/tools/not_using/lpmm_get_knowledge.py b/src/tools/not_using/lpmm_get_knowledge.py index df4fa6a4f..80b9b617b 100644 --- a/src/tools/not_using/lpmm_get_knowledge.py +++ b/src/tools/not_using/lpmm_get_knowledge.py @@ -36,6 +36,11 @@ class SearchKnowledgeFromLPMMTool(BaseTool): query = 
function_args.get("query") # threshold = function_args.get("threshold", 0.4) + # 检查LPMM知识库是否启用 + if qa_manager is None: + logger.debug("LPMM知识库已禁用,跳过知识获取") + return {"type": "info", "id": query, "content": "LPMM知识库已禁用"} + # 调用知识库搜索 knowledge_info = qa_manager.get_knowledge(query) From f624547034cf7c9f6e6f5bef57a78117bd333e92 Mon Sep 17 00:00:00 2001 From: SengokuCola <1026294844@qq.com> Date: Sun, 6 Jul 2025 11:40:12 +0800 Subject: [PATCH 8/8] Update planner_simple.py --- src/chat/focus_chat/planners/planner_simple.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/chat/focus_chat/planners/planner_simple.py b/src/chat/focus_chat/planners/planner_simple.py index 0b7aa96e7..3d044d6e7 100644 --- a/src/chat/focus_chat/planners/planner_simple.py +++ b/src/chat/focus_chat/planners/planner_simple.py @@ -30,8 +30,6 @@ def init_prompt(): {chat_content_block} {moderation_prompt} -重要提醒:如果聊天记录显示你刚刚已经回复过相似内容,即使话题仍然在进行,必须选择no_reply - 现在请你根据聊天内容选择合适的action: {action_options_text}