diff --git a/src/chat/focus_chat/heartFC_chat.py b/src/chat/focus_chat/heartFC_chat.py
index 22e361fc7..3835b2da8 100644
--- a/src/chat/focus_chat/heartFC_chat.py
+++ b/src/chat/focus_chat/heartFC_chat.py
@@ -16,6 +16,7 @@ from src.chat.focus_chat.info.info_base import InfoBase
 from src.chat.focus_chat.info_processors.chattinginfo_processor import ChattingInfoProcessor
 from src.chat.focus_chat.info_processors.mind_processor import MindProcessor
 from src.chat.focus_chat.info_processors.working_memory_processor import WorkingMemoryProcessor
+
 # from src.chat.focus_chat.info_processors.action_processor import ActionProcessor
 from src.chat.heart_flow.observation.hfcloop_observation import HFCloopObservation
 from src.chat.heart_flow.observation.working_observation import WorkingMemoryObservation
@@ -481,7 +482,7 @@ class HeartFChatting:
         await self.action_modifier.modify_actions(observations=observations, running_memorys=running_memorys)
         await self.action_observation.observe()
         observations.append(self.action_observation)
-        
+
         with Timer("执行 信息处理器", cycle_timers):
             all_plan_info, processor_time_costs = await self._process_processors(
                 observations, running_memorys, cycle_timers
diff --git a/src/chat/focus_chat/info_processors/chattinginfo_processor.py b/src/chat/focus_chat/info_processors/chattinginfo_processor.py
index 5bbaee4f7..c2f90819d 100644
--- a/src/chat/focus_chat/info_processors/chattinginfo_processor.py
+++ b/src/chat/focus_chat/info_processors/chattinginfo_processor.py
@@ -28,7 +28,10 @@ class ChattingInfoProcessor(BaseProcessor):
         super().__init__()
         # TODO: API-Adapter修改标记
         self.model_summary = LLMRequest(
-            model=global_config.model.utils_small, temperature=0.7, max_tokens=300, request_type="focus.observation.chat"
+            model=global_config.model.utils_small,
+            temperature=0.7,
+            max_tokens=300,
+            request_type="focus.observation.chat",
         )
 
     async def process_info(
diff --git a/src/chat/focus_chat/memory_activator.py b/src/chat/focus_chat/memory_activator.py
index 1e84e3d42..18a38f33e 100644
--- a/src/chat/focus_chat/memory_activator.py
+++ b/src/chat/focus_chat/memory_activator.py
@@ -70,7 +70,10 @@ class MemoryActivator:
     def __init__(self):
         # TODO: API-Adapter修改标记
         self.summary_model = LLMRequest(
-            model=global_config.model.memory_summary, temperature=0.7, max_tokens=50, request_type="focus.memory_activator"
+            model=global_config.model.memory_summary,
+            temperature=0.7,
+            max_tokens=50,
+            request_type="focus.memory_activator",
         )
         self.running_memory = []
         self.cached_keywords = set()  # 用于缓存历史关键词
diff --git a/src/chat/focus_chat/planners/modify_actions.py b/src/chat/focus_chat/planners/modify_actions.py
index 704be3900..c376426a6 100644
--- a/src/chat/focus_chat/planners/modify_actions.py
+++ b/src/chat/focus_chat/planners/modify_actions.py
@@ -12,14 +12,13 @@ from src.chat.focus_chat.planners.action_manager import ActionManager
 
 logger = get_logger("action_manager")
 
 
-class ActionModifier():
+class ActionModifier:
     """动作处理器
 
     用于处理Observation对象,将其转换为ObsInfo对象。
     """
 
     log_prefix = "动作处理"
-
     def __init__(self, action_manager: ActionManager):
         """初始化观察处理器"""
@@ -70,7 +69,7 @@ class ActionModifier():
             # reasons.append(f"移除动作{action_changes['remove']}因为检测到连续回复")
 
         # 处理ChattingObservation
-        if chat_obs :
+        if chat_obs:
             obs = chat_obs
             # 检查动作的关联类型
             chat_context = chat_manager.get_stream(obs.chat_id).context
diff --git a/src/chat/focus_chat/planners/planner.py b/src/chat/focus_chat/planners/planner.py
index 1ece41fe4..d80ac333b 100644
--- a/src/chat/focus_chat/planners/planner.py
+++ b/src/chat/focus_chat/planners/planner.py
@@ -120,8 +120,6 @@ class ActionPlanner:
 
             # self.action_manager.remove_action_from_using(action_name)
             # logger.debug(f"{self.log_prefix}移除动作: {action_name}, 原因: {reason}")
-
-
         # 继续处理其他信息
         self_info = ""
         current_mind = ""
@@ -144,7 +142,7 @@ class ActionPlanner:
                     structured_info = info.get_processed_info()
                     # print(f"structured_info: {structured_info}")
                 # elif not isinstance(info, ActionInfo):  # 跳过已处理的ActionInfo
-                    # extra_info.append(info.get_processed_info())
+                # extra_info.append(info.get_processed_info())
 
         # 获取当前可用的动作
         current_available_actions = self.action_manager.get_using_actions()
diff --git a/src/chat/heart_flow/observation/actions_observation.py b/src/chat/heart_flow/observation/actions_observation.py
index 6f80ec719..8310a17b7 100644
--- a/src/chat/heart_flow/observation/actions_observation.py
+++ b/src/chat/heart_flow/observation/actions_observation.py
@@ -6,6 +6,7 @@ from src.chat.focus_chat.planners.action_manager import ActionManager
 
 logger = get_logger("observation")
 
+
 # 特殊的观察,专门用于观察动作
 # 所有观察的基类
 class ActionObservation:
@@ -21,7 +22,6 @@ class ActionObservation:
     def get_observe_info(self):
         return self.observe_info
 
-
     def set_action_manager(self, action_manager: ActionManager):
         self.action_manager = action_manager
         self.all_actions = self.action_manager.get_registered_actions()
diff --git a/src/tools/tool_can_use/lpmm_get_knowledge.py b/src/tools/tool_can_use/lpmm_get_knowledge.py
index 4b6129df6..fc2dc072b 100644
--- a/src/tools/tool_can_use/lpmm_get_knowledge.py
+++ b/src/tools/tool_can_use/lpmm_get_knowledge.py
@@ -39,9 +39,9 @@ class SearchKnowledgeFromLPMMTool(BaseTool):
 
             # 调用知识库搜索
            knowledge_info = qa_manager.get_knowledge(query)
-            
+
             logger.debug(f"知识库查询结果: {knowledge_info}")
-            
+
             if knowledge_info:
                 content = f"你知道这些知识: {knowledge_info}"
             else:
@@ -53,6 +53,3 @@ class SearchKnowledgeFromLPMMTool(BaseTool):
             # 在其他异常情况下,确保 id 仍然是 query (如果它被定义了)
             query_id = query if "query" in locals() else "unknown_query"
             return {"type": "info", "id": query_id, "content": f"lpmm知识库搜索失败,炸了: {str(e)}"}
-
-
-