fix: Fix missing await and summart_model typo in MemoryActivator; reformat processor signatures
@@ -417,16 +417,22 @@ class HeartFChatting:

         # Run two tasks in parallel: thinking and tool execution
         with Timer("执行 信息处理器", cycle_timers):
             # 1. Sub-mind thinking - does not perform tool calls
-            think_task = asyncio.create_task(self.mind_processor.process_info(observations=observations,running_memorys=running_memorys))
+            think_task = asyncio.create_task(
+                self.mind_processor.process_info(observations=observations, running_memorys=running_memorys)
+            )
             logger.debug(f"{self.log_prefix} 启动子思维思考任务")

             # 2. Tool executor - dedicated to handling tool calls
-            tool_task = asyncio.create_task(self.tool_processor.process_info(observations=observations,running_memorys=running_memorys))
+            tool_task = asyncio.create_task(
+                self.tool_processor.process_info(observations=observations, running_memorys=running_memorys)
+            )
             logger.debug(f"{self.log_prefix} 启动工具执行任务")

             # 3. Chatting info processor
             chatting_info_task = asyncio.create_task(
-                self.chatting_info_processor.process_info(observations=observations,running_memorys=running_memorys)
+                self.chatting_info_processor.process_info(
+                    observations=observations, running_memorys=running_memorys
+                )
             )
             logger.debug(f"{self.log_prefix} 启动聊天信息处理器任务")
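The hunk above only shows the three processor tasks being created. Below is a minimal sketch of how such tasks could be awaited together; the `asyncio.gather` call and the `run_processors` wrapper are assumptions for illustration and are not part of this diff.

```python
import asyncio

# Hypothetical sketch: run the three processors concurrently and collect
# their results. The process_info keyword arguments mirror the diff;
# the gather call itself is an assumption.
async def run_processors(mind_processor, tool_processor, chatting_info_processor,
                         observations, running_memorys):
    think_task = asyncio.create_task(
        mind_processor.process_info(observations=observations, running_memorys=running_memorys)
    )
    tool_task = asyncio.create_task(
        tool_processor.process_info(observations=observations, running_memorys=running_memorys)
    )
    chatting_info_task = asyncio.create_task(
        chatting_info_processor.process_info(observations=observations, running_memorys=running_memorys)
    )
    # All three coroutines run concurrently; gather preserves their order.
    return await asyncio.gather(think_task, tool_task, chatting_info_task)
```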
@@ -21,7 +21,11 @@ class BaseProcessor(ABC):

     @abstractmethod
     async def process_info(
-        self, infos: List[InfoBase], observations: Optional[List[Observation]] = None, running_memorys: Optional[List[Dict]] = None, **kwargs: Any
+        self,
+        infos: List[InfoBase],
+        observations: Optional[List[Observation]] = None,
+        running_memorys: Optional[List[Dict]] = None,
+        **kwargs: Any,
     ) -> List[InfoBase]:
         """Abstract method for processing info objects
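For reference, a self-contained sketch of a concrete processor implementing this reformatted abstract signature; `InfoBase`, `Observation`, and `PassThroughProcessor` here are simplified stand-ins for illustration, not the project's real classes.

```python
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional

class InfoBase: ...        # stand-in for the project's InfoBase
class Observation: ...     # stand-in for the project's Observation

class BaseProcessor(ABC):
    @abstractmethod
    async def process_info(
        self,
        infos: List[InfoBase],
        observations: Optional[List[Observation]] = None,
        running_memorys: Optional[List[Dict]] = None,
        **kwargs: Any,
    ) -> List[InfoBase]:
        """Process info objects and return derived InfoBase instances."""

class PassThroughProcessor(BaseProcessor):
    async def process_info(
        self,
        infos: List[InfoBase],
        observations: Optional[List[Observation]] = None,
        running_memorys: Optional[List[Dict]] = None,
        **kwargs: Any,
    ) -> List[InfoBase]:
        # Trivial implementation: hand the inputs back unchanged.
        return list(infos)
```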
@@ -8,6 +8,7 @@ from src.heart_flow.observation.chatting_observation import ChattingObservation
 from src.heart_flow.observation.hfcloop_observation import HFCloopObservation
 from src.heart_flow.info.cycle_info import CycleInfo
+from typing import Dict

 logger = get_logger("observation")

@@ -21,7 +22,12 @@ class ChattingInfoProcessor(BaseProcessor):
         """Initialize the observation processor"""
         super().__init__()

-    async def process_info(self, observations: Optional[List[Observation]] = None, running_memorys: Optional[List[Dict]] = None, **kwargs: Any) -> List[InfoBase]:
+    async def process_info(
+        self,
+        observations: Optional[List[Observation]] = None,
+        running_memorys: Optional[List[Dict]] = None,
+        **kwargs: Any,
+    ) -> List[InfoBase]:
         """Process Observation objects

         Args:
@@ -15,7 +15,6 @@ from src.plugins.person_info.relationship_manager import relationship_manager
 from .base_processor import BaseProcessor
 from src.heart_flow.info.mind_info import MindInfo
 from typing import List, Optional
 from src.heart_flow.observation.memory_observation import MemoryObservation
 from src.heart_flow.observation.hfcloop_observation import HFCloopObservation
 from src.plugins.heartFC_chat.info_processors.processor_utils import (
     calculate_similarity,
@@ -124,7 +123,9 @@ class MindProcessor(BaseProcessor):
         self.structured_info_str = "\n".join(lines)
         logger.debug(f"{self.log_prefix} 更新 structured_info_str: \n{self.structured_info_str}")

-    async def process_info(self, observations: Optional[List[Observation]] = None, running_memorys: Optional[List[Dict]] = None, *infos) -> List[dict]:
+    async def process_info(
+        self, observations: Optional[List[Observation]] = None, running_memorys: Optional[List[Dict]] = None, *infos
+    ) -> List[dict]:
         """Process info objects

         Args:
@@ -133,14 +134,16 @@ class MindProcessor(BaseProcessor):
         Returns:
             List[dict]: the processed structured info list
         """
-        current_mind = await self.do_thinking_before_reply(observations,running_memorys)
+        current_mind = await self.do_thinking_before_reply(observations, running_memorys)

         mind_info = MindInfo()
         mind_info.set_current_mind(current_mind)

         return [mind_info]

-    async def do_thinking_before_reply(self, observations: Optional[List[Observation]] = None, running_memorys: Optional[List[Dict]] = None):
+    async def do_thinking_before_reply(
+        self, observations: Optional[List[Observation]] = None, running_memorys: Optional[List[Dict]] = None
+    ):
         """
         Think before replying, generating an inner thought and collecting tool-call results

@@ -193,8 +196,6 @@ class MindProcessor(BaseProcessor):
                 # Get the chat content
                 chat_observe_info = observation.get_observe_info()
                 person_list = observation.person_list
             if isinstance(observation, MemoryObservation):
                 memory_observe_info = observation.get_observe_info()
             if isinstance(observation, HFCloopObservation):
                 hfcloop_observe_info = observation.get_observe_info()
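The last hunk above dispatches on observation type with isinstance checks. A small, self-contained illustration of that pattern; the observation classes and `collect_observe_info` below are simplified stand-ins, not the project's code.

```python
# Each observation type contributes its own slice of context for the prompt.
class ChattingObservation:
    def get_observe_info(self) -> str:
        return "recent chat messages"

class HFCloopObservation:
    def get_observe_info(self) -> str:
        return "recent reply-loop state"

def collect_observe_info(observations) -> tuple[str, str]:
    chat_observe_info = ""
    hfcloop_observe_info = ""
    for observation in observations:
        if isinstance(observation, ChattingObservation):
            chat_observe_info = observation.get_observe_info()
        if isinstance(observation, HFCloopObservation):
            hfcloop_observe_info = observation.get_observe_info()
    return chat_observe_info, hfcloop_observe_info

print(collect_observe_info([ChattingObservation(), HFCloopObservation()]))
```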
@@ -63,7 +63,9 @@ class ToolProcessor(BaseProcessor):
         )
         self.structured_info = []

-    async def process_info(self, observations: Optional[List[Observation]] = None, running_memorys: Optional[List[Dict]] = None, *infos) -> List[dict]:
+    async def process_info(
+        self, observations: Optional[List[Observation]] = None, running_memorys: Optional[List[Dict]] = None, *infos
+    ) -> List[dict]:
         """Process info objects

         Args:
@@ -32,12 +32,21 @@ Prompt(

 class MemoryActivator:
     def __init__(self):
-        self.summart_model = LLMRequest(
+        self.summary_model = LLMRequest(
             model=global_config.llm_observation, temperature=0.7, max_tokens=300, request_type="chat_observation"
         )
         self.running_memory = []

     async def activate_memory(self, observations) -> List[Dict]:
         """
         Activate memories

         Args:
             observations: the existing list of observations that have been performed

         Returns:
             List[Dict]: the list of activated memories
         """
         obs_info_text = ""
         for observation in observations:
             if isinstance(observation, ChattingObservation):
@@ -51,7 +60,11 @@ class MemoryActivator:

         prompt = global_prompt_manager.format_prompt("memory_activator_prompt", obs_info_text=obs_info_text)

-        response = self.summart_model.generate_response(prompt)
+        logger.debug(f"prompt: {prompt}")
+
+        response = await self.summary_model.generate_response(prompt)
+
         logger.debug(f"response: {response}")

         keywords = get_keywords_from_json(response)
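The core bug fixed here is the missing `await` (plus the `summart_model` → `summary_model` attribute typo). A minimal, self-contained demonstration of why the un-awaited call was broken, using a fake model class in place of the project's LLMRequest:

```python
import asyncio

# Calling an async method without awaiting it returns a coroutine object,
# not the model's response text, so downstream parsing would fail.
class FakeSummaryModel:
    async def generate_response(self, prompt: str) -> str:
        await asyncio.sleep(0)          # pretend to call the LLM
        return f"summary of: {prompt}"

async def main() -> None:
    model = FakeSummaryModel()

    not_awaited = model.generate_response("hello")   # a coroutine, unusable as text
    print(type(not_awaited))                         # <class 'coroutine'>
    not_awaited.close()                              # avoid a "never awaited" warning

    awaited = await model.generate_response("hello") # the actual string result
    print(awaited)

asyncio.run(main())
```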