fixL:
@@ -13,8 +13,8 @@ mai_state_config = LogConfig(
 logger = get_module_logger("mai_state_manager", config=mai_state_config)
 
 
-enable_unlimited_hfc_chat = True
-# enable_unlimited_hfc_chat = False
+# enable_unlimited_hfc_chat = True
+enable_unlimited_hfc_chat = False
 
 
 class MaiState(enum.Enum):
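The first hunk flips the module-level enable_unlimited_hfc_chat switch from True to False, leaving the opposite value behind as a comment. A minimal sketch of how such a module-level flag is typically consumed; the allowed_hfc_chat_count helper and the MAX_HFC_CHATS cap are illustrative assumptions, not taken from this repository:

# Hypothetical sketch: a module-level feature flag gating how many
# heart-flow chats may run at once.
enable_unlimited_hfc_chat = False  # mirrors the value set in this commit

MAX_HFC_CHATS = 3  # assumed default cap; not from the source


def allowed_hfc_chat_count(active_count: int) -> bool:
    """Return True if another heart-flow chat may start under the flag."""
    if enable_unlimited_hfc_chat:
        return True
    return active_count < MAX_HFC_CHATS


print(allowed_hfc_chat_count(2))  # True while under the cap of 3
print(allowed_hfc_chat_count(5))  # False once the cap is reached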
@@ -221,9 +221,7 @@ class SubHeartflow:
 
         # 聊天状态管理
         self.chat_state: ChatStateInfo = ChatStateInfo()  # 该sub_heartflow的聊天状态信息
-        self.interest_chatting = InterestChatting(
-            state_change_callback=self.set_chat_state
-        )
+        self.interest_chatting = InterestChatting(state_change_callback=self.set_chat_state)
 
         # 活动状态管理
         self.last_active_time = time.time()  # 最后活跃时间
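The collapsed InterestChatting(...) call keeps the same keyword argument: the sub-heartflow hands its own set_chat_state method in as state_change_callback, so the interest tracker can push state changes back to its owner. A minimal sketch of that callback wiring, with both class bodies invented purely for illustration:

from typing import Callable


class InterestChatting:
    """Hypothetical stand-in: notifies its owner when interest-driven state changes."""

    def __init__(self, state_change_callback: Callable[[str], None]):
        self._notify = state_change_callback

    def on_interest_spike(self) -> None:
        # When interest crosses a threshold, tell the owner to change chat state.
        self._notify("FOCUSED")


class SubHeartflowSketch:
    def __init__(self):
        self.chat_status = "ABSENT"
        self.interest_chatting = InterestChatting(state_change_callback=self.set_chat_state)

    def set_chat_state(self, new_state: str) -> None:
        self.chat_status = new_state


flow = SubHeartflowSketch()
flow.interest_chatting.on_interest_spike()
print(flow.chat_status)  # FOCUSED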
@@ -238,15 +236,11 @@ class SubHeartflow:
 
         # LLM模型配置
         self.sub_mind = SubMind(
-            subheartflow_id=self.subheartflow_id,
-            chat_state=self.chat_state,
-            observations=self.observations
+            subheartflow_id=self.subheartflow_id, chat_state=self.chat_state, observations=self.observations
         )
 
-
         self.log_prefix = chat_manager.get_stream_name(self.subheartflow_id) or self.subheartflow_id
 
-
     async def add_time_current_state(self, add_time: float):
         self.current_state_time += add_time
 
@@ -335,9 +329,7 @@ class SubHeartflow:
         logger.info(f"{log_prefix} 麦麦准备开始专注聊天 (创建新实例)...")
         try:
             self.heart_fc_instance = HeartFChatting(
-                chat_id=self.chat_id,
-                sub_mind=self.sub_mind,
-                observations=self.observations
+                chat_id=self.chat_id, sub_mind=self.sub_mind, observations=self.observations
             )
             if await self.heart_fc_instance._initialize():
                 await self.heart_fc_instance.start()  # 初始化成功后启动循环
@@ -434,7 +426,6 @@ class SubHeartflow:
 
         logger.info(f"{self.log_prefix} 子心流后台任务已停止。")
 
-
     def update_current_mind(self, response):
         self.sub_mind.update_current_mind(response)
 
@@ -523,6 +514,3 @@ class SubHeartflow:
         self.chat_state.chat_status = ChatState.ABSENT  # 状态重置为不参与
 
         logger.info(f"{self.log_prefix} 子心流关闭完成。")
-
-
-
@@ -19,7 +19,6 @@ subheartflow_config = LogConfig(
 logger = get_module_logger("subheartflow", config=subheartflow_config)
 
 
-
 def init_prompt():
     prompt = ""
     # prompt += f"麦麦的总体想法是:{self.main_heartflow_info}\n\n"
@@ -41,14 +40,8 @@ def init_prompt():
     Prompt(prompt, "sub_heartflow_prompt_before")
 
 
-
 class SubMind:
-    def __init__(
-        self,
-        subheartflow_id: str,
-        chat_state: ChatStateInfo,
-        observations: Observation
-    ):
+    def __init__(self, subheartflow_id: str, chat_state: ChatStateInfo, observations: Observation):
         self.subheartflow_id = subheartflow_id
 
         self.llm_model = LLMRequest(
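The SubMind.__init__ rewrite here (and the HeartFChatting one further down) only changes layout: a multi-line parameter list becomes a single line under the formatter's line-length limit. A quick sketch, using a trimmed-down parameter list, confirming the two spellings produce identical signatures:

import inspect


class MultiLine:
    def __init__(
        self,
        subheartflow_id: str,
    ):
        self.subheartflow_id = subheartflow_id


class OneLine:
    def __init__(self, subheartflow_id: str):
        self.subheartflow_id = subheartflow_id


# Same parameters, same annotations — only the source layout differs.
assert str(inspect.signature(MultiLine.__init__)) == str(inspect.signature(OneLine.__init__))
print(inspect.signature(OneLine.__init__))  # (self, subheartflow_id: str)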
@@ -151,7 +144,7 @@ class SubMind:
 
         # ---------- 5. 执行LLM请求并处理响应 ----------
         content = ""  # 初始化内容变量
-        reasoning_content = ""  # 初始化推理内容变量
+        _reasoning_content = ""  # 初始化推理内容变量
 
         try:
             # 调用LLM生成响应
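Renaming reasoning_content to _reasoning_content marks a value that is assigned but never used; the leading underscore keeps unused-variable lint checks (for example pyflakes/Ruff F841) quiet without changing behaviour. A small sketch of the same convention, with the generate() return shape assumed rather than taken from this project:

def generate() -> tuple[str, str]:
    """Hypothetical LLM call returning (content, reasoning_content)."""
    return "最终回复", "模型的推理过程"


# The reasoning half is received but intentionally ignored.
content, _reasoning_content = generate()
print(content)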
@@ -212,7 +205,6 @@ class SubMind:
 
         return self.current_mind, self.past_mind
 
-
    async def _execute_tool_calls(self, tool_calls, tool_instance):
        """
        执行一组工具调用并收集结果
@@ -245,7 +237,6 @@ class SubMind:
         logger.debug(f"工具调用收集到结构化信息: {safe_json_dumps(structured_info, ensure_ascii=False)}")
         self.structured_info = structured_info
 
-
     def update_current_mind(self, response):
         self.past_mind.append(self.current_mind)
         self.current_mind = response
@@ -67,12 +67,7 @@ class HeartFChatting:
     其生命周期现在由其关联的 SubHeartflow 的 FOCUSED 状态控制。
     """
 
-    def __init__(
-        self,
-        chat_id: str,
-        sub_mind: SubMind,
-        observations: Observation
-    ):
+    def __init__(self, chat_id: str, sub_mind: SubMind, observations: Observation):
         """
         HeartFChatting 初始化函数
 
@@ -438,7 +433,9 @@ class HeartFChatting:
         llm_error = False  # LLM错误标志
 
         try:
-            prompt = await self._build_planner_prompt(observed_messages_str, current_mind, self.sub_mind.structured_info)
+            prompt = await self._build_planner_prompt(
+                observed_messages_str, current_mind, self.sub_mind.structured_info
+            )
             payload = {
                 "model": self.planner_llm.model_name,
                 "messages": [{"role": "user", "content": prompt}],
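The planner hunk wraps the long _build_planner_prompt(...) call and then assembles an OpenAI-style chat payload: a model name plus a messages list of role/content dicts. A hedged sketch of that payload shape; the build_planner_payload helper and the "example-planner-model" name are assumptions for illustration, not this project's actual client code:

import json


def build_planner_payload(model_name: str, prompt: str) -> dict:
    """Assemble an OpenAI-style chat payload like the planner hunk above."""
    return {
        "model": model_name,
        "messages": [{"role": "user", "content": prompt}],
    }


payload = build_planner_payload("example-planner-model", "决定下一步动作")
print(json.dumps(payload, ensure_ascii=False, indent=2))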