diff --git a/src/chat/focus_chat/info_processors/working_memory_processor.py b/src/chat/focus_chat/info_processors/working_memory_processor.py
index 2de0bcfab..f81833c00 100644
--- a/src/chat/focus_chat/info_processors/working_memory_processor.py
+++ b/src/chat/focus_chat/info_processors/working_memory_processor.py
@@ -71,6 +71,7 @@ class WorkingMemoryProcessor(BaseProcessor):
         """
         working_memory = None
         chat_info = ""
+        chat_obs = None
         try:
             for observation in observations:
                 if isinstance(observation, WorkingMemoryObservation):
@@ -79,10 +80,15 @@
                     chat_info = observation.get_observe_info()
                     chat_obs = observation

             # Check whether there is content pending compression
-            if chat_obs.compressor_prompt:
+            if chat_obs and chat_obs.compressor_prompt:
                 logger.debug(f"{self.log_prefix} Compressing chat memory")
                 await self.compress_chat_memory(working_memory, chat_obs)
+
+            # Check whether working_memory is None
+            if working_memory is None:
+                logger.debug(f"{self.log_prefix} No working memory observation found, skipping processing")
+                return []

             all_memory = working_memory.get_all_memories()
             if not all_memory:
                 logger.debug(f"{self.log_prefix} No working memory at the moment, skipping extraction")
@@ -183,6 +189,11 @@
             working_memory: the working memory object
             obs: the chat observation object
         """
+        # Check whether working_memory is None
+        if working_memory is None:
+            logger.warning(f"{self.log_prefix} Working memory object is None, cannot compress chat memory")
+            return
+
         try:
             summary_result, _ = await self.llm_model.generate_response_async(obs.compressor_prompt)
             if not summary_result:
@@ -235,6 +246,11 @@
             memory_id1: ID of the first memory
             memory_id2: ID of the second memory
         """
+        # Check whether working_memory is None
+        if working_memory is None:
+            logger.warning(f"{self.log_prefix} Working memory object is None, cannot merge memories")
+            return
+
         try:
             merged_memory = await working_memory.merge_memory(memory_id1, memory_id2)
             logger.debug(f"{self.log_prefix} Brief of the merged memory: {merged_memory.brief}")
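
For reviewers, below is a minimal standalone sketch of the guard pattern this diff introduces. It is not code from the repository: `process_observations`, the `kind` attribute, and `FakeWorkingMemory` are hypothetical stand-ins for the real observation classes. The point it illustrates is that every entry point now checks `working_memory is None` (and that `chat_obs` was actually assigned) before dereferencing, so a missing WorkingMemoryObservation degrades to a logged no-op instead of raising AttributeError.

# Minimal, self-contained sketch (hypothetical names, not repository code)
# of the None-guard flow added above: collect observations, compress only
# when a chat observation exists, and bail out early when no working
# memory observation was found.
import asyncio
import logging

logger = logging.getLogger("working_memory_demo")


async def process_observations(observations: list) -> list:
    working_memory = None
    chat_obs = None
    for obs in observations:
        kind = getattr(obs, "kind", None)
        if kind == "working_memory":
            working_memory = obs
        elif kind == "chatting":
            chat_obs = obs

    # Guard 1: compress only if a chat observation with a prompt exists.
    if chat_obs and getattr(chat_obs, "compressor_prompt", None):
        logger.debug("Compressing chat memory")

    # Guard 2: without a working memory observation, nothing to extract.
    if working_memory is None:
        logger.debug("No working memory observation found, skipping processing")
        return []

    return working_memory.get_all_memories()


class FakeWorkingMemory:
    kind = "working_memory"

    def get_all_memories(self):
        return ["memory-1"]


# Before this diff, an empty observation list crashed with
# AttributeError: 'NoneType' object has no attribute 'get_all_memories';
# with the guards in place it is a quiet no-op.
assert asyncio.run(process_observations([])) == []
assert asyncio.run(process_observations([FakeWorkingMemory()])) == ["memory-1"]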