feat: split and rename model configs, fix action-set restore issue
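Summary of the renames visible in the hunks below (the request_type strings get the same focus_ prefix):

global_config.model.normal -> global_config.model.focus_expressor (DefaultExpressor)
global_config.model.sub_heartflow -> global_config.model.focus_chat_mind (MindProcessor, WorkingMemoryProcessor) and global_config.model.focus_self_recognize (SelfProcessor)
global_config.model.tool_use -> global_config.model.focus_tool_use (ToolProcessor)
global_config.model.plan -> global_config.model.focus_planner (ActionPlanner)
global_config.model.summary -> global_config.model.memory_summary (MemoryActivator) and global_config.model.focus_working_memory (MemoryManager)
global_config.model.observation -> dropped; ActionProcessor no longer builds its own model_summary LLMRequest

The ActionManager/ActionPlanner hunks also replace the one-shot _original_actions_backup mechanism with a restore to the _default_actions snapshot.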
@@ -78,10 +78,10 @@ class DefaultExpressor:
         self.log_prefix = "expressor"
         # TODO: API-Adapter修改标记
         self.express_model = LLMRequest(
-            model=global_config.model.normal,
-            temperature=global_config.model.normal["temp"],
+            model=global_config.model.focus_expressor,
+            temperature=global_config.model.focus_expressor["temp"],
             max_tokens=256,
-            request_type="response_heartflow",
+            request_type="focus_expressor",
         )
         self.heart_fc_sender = HeartFCSender()
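A minimal sketch of the config shape this and the following hunks imply: each renamed focus_* entry is indexed with ["temp"], so it must be mapping-like with at least a "temp" key. Every other field name here, and the stubbed LLMRequest, are assumptions for illustration only, not the project's real schema or class.

# The new entry must expose at least "temp"; "name" is a hypothetical field.
focus_expressor = {"name": "example-model", "temp": 0.7}

def LLMRequest(**kwargs):  # stand-in so the sketch runs without the project code
    return kwargs

express_model = LLMRequest(
    model=focus_expressor,
    temperature=focus_expressor["temp"],
    max_tokens=256,
    request_type="focus_expressor",
)
print(express_model["temperature"])  # 0.7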
@@ -27,9 +27,6 @@ class ActionProcessor(BaseProcessor):
         """初始化观察处理器"""
         super().__init__()
         # TODO: API-Adapter修改标记
-        self.model_summary = LLMRequest(
-            model=global_config.model.observation, temperature=0.7, max_tokens=300, request_type="chat_observation"
-        )

     async def process_info(
         self,
@@ -71,10 +71,10 @@ class MindProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id

         self.llm_model = LLMRequest(
-            model=global_config.model.sub_heartflow,
-            temperature=global_config.model.sub_heartflow["temp"],
+            model=global_config.model.focus_chat_mind,
+            temperature=global_config.model.focus_chat_mind["temp"],
             max_tokens=800,
-            request_type="sub_heart_flow",
+            request_type="focus_chat_mind",
         )

         self.current_mind = ""
@@ -54,10 +54,10 @@ class SelfProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id

         self.llm_model = LLMRequest(
-            model=global_config.model.sub_heartflow,
-            temperature=global_config.model.sub_heartflow["temp"],
+            model=global_config.model.focus_self_recognize,
+            temperature=global_config.model.focus_self_recognize["temp"],
             max_tokens=800,
-            request_type="self_identify",
+            request_type="focus_self_identify",
         )

         name = chat_manager.get_stream_name(self.subheartflow_id)
@@ -49,9 +49,9 @@ class ToolProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id
         self.log_prefix = f"[{subheartflow_id}:ToolExecutor] "
         self.llm_model = LLMRequest(
-            model=global_config.model.tool_use,
+            model=global_config.model.focus_tool_use,
             max_tokens=500,
-            request_type="tool_execution",
+            request_type="focus_tool",
         )
         self.structured_info = []
@@ -61,10 +61,10 @@ class WorkingMemoryProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id

         self.llm_model = LLMRequest(
-            model=global_config.model.sub_heartflow,
-            temperature=global_config.model.sub_heartflow["temp"],
+            model=global_config.model.focus_chat_mind,
+            temperature=global_config.model.focus_chat_mind["temp"],
             max_tokens=800,
-            request_type="working_memory",
+            request_type="focus_working_memory",
         )

         name = chat_manager.get_stream_name(self.subheartflow_id)
@@ -36,7 +36,7 @@ class MemoryActivator:
     def __init__(self):
         # TODO: API-Adapter修改标记
         self.summary_model = LLMRequest(
-            model=global_config.model.summary, temperature=0.7, max_tokens=50, request_type="chat_observation"
+            model=global_config.model.memory_summary, temperature=0.7, max_tokens=50, request_type="chat_observation"
         )
         self.running_memory = []
@@ -28,8 +28,7 @@ class ActionManager:
         self._registered_actions: Dict[str, ActionInfo] = {}
         # 当前正在使用的动作集合,默认加载默认动作
         self._using_actions: Dict[str, ActionInfo] = {}
-        # 临时备份原始使用中的动作
-        self._original_actions_backup: Optional[Dict[str, ActionInfo]] = None

+        # 默认动作集,仅作为快照,用于恢复默认
         self._default_actions: Dict[str, ActionInfo] = {}
@@ -278,22 +277,18 @@ class ActionManager:
         return True

     def temporarily_remove_actions(self, actions_to_remove: List[str]) -> None:
-        """临时移除使用集中的指定动作,备份原始使用集"""
-        if self._original_actions_backup is None:
-            self._original_actions_backup = self._using_actions.copy()
+        """临时移除使用集中的指定动作"""
         for name in actions_to_remove:
             self._using_actions.pop(name, None)

     def restore_actions(self) -> None:
-        """恢复之前备份的原始使用集"""
-        if self._original_actions_backup is not None:
-            self._using_actions = self._original_actions_backup.copy()
-            self._original_actions_backup = None
+        """恢复到默认动作集"""
+        logger.debug(f"恢复动作集: 从 {list(self._using_actions.keys())} 恢复到默认动作集 {list(self._default_actions.keys())}")
+        self._using_actions = self._default_actions.copy()

     def restore_default_actions(self) -> None:
         """恢复默认动作集到使用集"""
         self._using_actions = self._default_actions.copy()
-        self._original_actions_backup = None

     def get_action(self, action_name: str) -> Optional[Type[BaseAction]]:
         """
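A minimal, self-contained sketch (not the project's actual ActionManager) of the snapshot-based restore this hunk introduces: restore_actions() now always resets the in-use set to the _default_actions snapshot, instead of depending on a one-shot _original_actions_backup that could already be consumed or never set. Class and variable names below are invented for illustration.

from typing import Dict, List


class ActionSetSketch:
    def __init__(self, default_actions: Dict[str, str]):
        # Snapshot of the default action set; the restore target.
        self._default_actions: Dict[str, str] = dict(default_actions)
        self._using_actions: Dict[str, str] = dict(default_actions)

    def get_using_actions(self) -> Dict[str, str]:
        return self._using_actions

    def temporarily_remove_actions(self, actions_to_remove: List[str]) -> None:
        # Drop actions from the in-use set only; no backup bookkeeping needed.
        for name in actions_to_remove:
            self._using_actions.pop(name, None)

    def restore_actions(self) -> None:
        # Always return to the default snapshot, no matter how many temporary
        # removals happened since the last restore.
        self._using_actions = dict(self._default_actions)


manager = ActionSetSketch({"reply": "...", "no_reply": "...", "emoji": "..."})
manager.temporarily_remove_actions(["emoji"])
manager.temporarily_remove_actions(["reply"])  # second removal before any restore
manager.restore_actions()
assert set(manager.get_using_actions()) == {"reply", "no_reply", "emoji"}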
@@ -78,9 +78,9 @@ class ActionPlanner:
         self.log_prefix = log_prefix
         # LLM规划器配置
         self.planner_llm = LLMRequest(
-            model=global_config.model.plan,
+            model=global_config.model.focus_planner,
             max_tokens=1000,
-            request_type="action_planning",  # 用于动作规划
+            request_type="focus_planner",  # 用于动作规划
         )

         self.action_manager = action_manager
@@ -161,6 +161,10 @@ class ActionPlanner:
             action = "no_reply"
             reasoning = "没有可用的动作" if not current_available_actions else "只有no_reply动作可用,跳过规划"
             logger.info(f"{self.log_prefix}{reasoning}")
+            self.action_manager.restore_actions()
+            logger.debug(
+                f"{self.log_prefix}恢复到默认动作集, 当前可用: {list(self.action_manager.get_using_actions().keys())}"
+            )
             return {
                 "action_result": {"action_type": action, "action_data": action_data, "reasoning": reasoning},
                 "current_mind": current_mind,
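The four added lines presumably close the gap behind the "fix action-set restore" part of this commit: when the planner bailed out early because no usable actions were available, it previously returned without calling restore_actions(), so actions removed via temporarily_remove_actions() stayed missing in later planning cycles. With this hunk both the early-return path and the normal path in the next hunk end by restoring the default action set.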
@@ -241,10 +245,10 @@ class ActionPlanner:
             f"{self.log_prefix}规划器Prompt:\n{prompt}\n\n决策动作:{action},\n动作信息: '{action_data}'\n理由: {reasoning}"
         )

-        # 恢复原始动作集
+        # 恢复到默认动作集
         self.action_manager.restore_actions()
         logger.debug(
-            f"{self.log_prefix}恢复了原始动作集, 当前可用: {list(self.action_manager.get_using_actions().keys())}"
+            f"{self.log_prefix}恢复到默认动作集, 当前可用: {list(self.action_manager.get_using_actions().keys())}"
         )

         action_result = {"action_type": action, "action_data": action_data, "reasoning": reasoning}
@@ -33,7 +33,7 @@ class MemoryManager:
         self._id_map: Dict[str, MemoryItem] = {}

         self.llm_summarizer = LLMRequest(
-            model=global_config.model.summary, temperature=0.3, max_tokens=512, request_type="memory_summarization"
+            model=global_config.model.focus_working_memory, temperature=0.3, max_tokens=512, request_type="memory_summarization"
         )

     @property