SengokuCola
2025-07-01 18:14:47 +08:00
3 changed files with 86 additions and 111 deletions

View File

@@ -215,8 +215,6 @@ class HeartFChatting:
         else:
             logger.warning(f"{self.log_prefix} 没有注册任何处理器。这可能是由于配置错误或所有处理器都被禁用了。")

     async def start(self):
         """检查是否需要启动主循环,如果未激活则启动。"""
         logger.debug(f"{self.log_prefix} 开始启动 HeartFChatting")
@@ -397,8 +395,6 @@ class HeartFChatting:
("\n前处理器耗时: " + "; ".join(processor_time_strings)) if processor_time_strings else "" ("\n前处理器耗时: " + "; ".join(processor_time_strings)) if processor_time_strings else ""
) )
logger.info( logger.info(
f"{self.log_prefix}{self._current_cycle_detail.cycle_id}次思考," f"{self.log_prefix}{self._current_cycle_detail.cycle_id}次思考,"
f"耗时: {self._current_cycle_detail.end_time - self._current_cycle_detail.start_time:.1f}秒, " f"耗时: {self._current_cycle_detail.end_time - self._current_cycle_detail.start_time:.1f}秒, "
@@ -557,8 +553,6 @@ class HeartFChatting:
         return all_plan_info, processor_time_costs

     async def _observe_process_plan_action_loop(self, cycle_timers: dict, thinking_id: str) -> dict:
         try:
             loop_start_time = time.time()
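For orientation, here is a minimal standalone sketch of the timing summary logged in the hunk above: per-processor costs are joined with "; " and the total cycle duration is printed with one decimal place. The function name, the cost dictionary, and the "第...次思考" wording are illustrative assumptions, not code from the repository.

```python
import time


def format_cycle_log(cycle_id: int, start_time: float, end_time: float,
                     processor_time_costs: dict) -> str:
    # Build "name: cost" fragments for each pre-processor that ran this cycle.
    processor_time_strings = [f"{name}: {cost:.2f}s" for name, cost in processor_time_costs.items()]
    suffix = ("\n前处理器耗时: " + "; ".join(processor_time_strings)) if processor_time_strings else ""
    # Total cycle duration formatted to one decimal place, as in the log line above.
    return f"第{cycle_id}次思考, 耗时: {end_time - start_time:.1f}秒{suffix}"


if __name__ == "__main__":
    loop_start_time = time.time()
    time.sleep(0.1)  # stand-in for one observe/process/plan/action cycle
    print(format_cycle_log(1, loop_start_time, time.time(), {"memory": 0.30, "relation": 0.12}))
```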

View File

@@ -429,10 +429,7 @@ class DefaultReplyer:
         try:
             # 使用工具执行器获取信息
             tool_results = await self.tool_executor.execute_from_chat_message(
-                sender = sender,
-                target_message=text,
-                chat_history=chat_history,
-                return_details=False
+                sender=sender, target_message=text, chat_history=chat_history, return_details=False
             )
             if tool_results:
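A small hypothetical sketch of the call pattern in this hunk: the reply path awaits execute_from_chat_message with keyword arguments on one line and only proceeds when tool results come back. The stub executor below exists purely to make the snippet runnable; only the method and parameter names are taken from the diff.

```python
import asyncio


class StubToolExecutor:
    # Hypothetical stand-in for ToolExecutor; only the method signature mirrors the diff.
    async def execute_from_chat_message(self, target_message: str, chat_history: list,
                                        sender: str, return_details: bool = False) -> list:
        # A real executor would let the LLM pick tools; here we just echo a fake result.
        return [{"name": "demo_tool", "content": f"echo: {target_message}"}]


async def main():
    executor = StubToolExecutor()
    # Same single-line keyword-argument call shape as the reformatted code above.
    tool_results = await executor.execute_from_chat_message(
        sender="alice", target_message="今天天气如何?", chat_history=["hi"], return_details=False
    )
    if tool_results:
        print(tool_results)


asyncio.run(main())
```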

View File

@@ -59,11 +59,7 @@ class ToolExecutor:
logger.info(f"{self.log_prefix}工具执行器初始化完成,缓存{'启用' if enable_cache else '禁用'}TTL={cache_ttl}") logger.info(f"{self.log_prefix}工具执行器初始化完成,缓存{'启用' if enable_cache else '禁用'}TTL={cache_ttl}")
async def execute_from_chat_message( async def execute_from_chat_message(
self, self, target_message: str, chat_history: list[str], sender: str, return_details: bool = False
target_message: str,
chat_history: list[str],
sender: str,
return_details: bool = False
) -> List[Dict] | Tuple[List[Dict], List[str], str]: ) -> List[Dict] | Tuple[List[Dict], List[str], str]:
"""从聊天消息执行工具 """从聊天消息执行工具
@@ -113,10 +109,7 @@ class ToolExecutor:
logger.debug(f"{self.log_prefix}开始LLM工具调用分析") logger.debug(f"{self.log_prefix}开始LLM工具调用分析")
# 调用LLM进行工具决策 # 调用LLM进行工具决策
response, other_info = await self.llm_model.generate_response_async( response, other_info = await self.llm_model.generate_response_async(prompt=prompt, tools=tools)
prompt=prompt,
tools=tools
)
# 解析LLM响应 # 解析LLM响应
if len(other_info) == 3: if len(other_info) == 3:
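A hedged sketch of the collapsed call above: the model wrapper is assumed to expose generate_response_async(prompt=..., tools=...) exactly as shown, and the diff only reveals that other_info is checked for length 3 before parsing. The stub model and the unpacked element names are invented for illustration.

```python
import asyncio


class StubLLM:
    # Hypothetical model wrapper; only the call shape generate_response_async(prompt=..., tools=...)
    # comes from the diff above.
    async def generate_response_async(self, prompt: str, tools: list):
        tool_calls = [{"name": t["function"]["name"], "arguments": {}} for t in tools]
        return "ok", ("reasoning text", "stub-model", tool_calls)


async def main():
    tools = [{"type": "function", "function": {"name": "web_search", "parameters": {}}}]
    llm_model = StubLLM()
    # 调用LLM进行工具决策 (same one-line call as in the hunk)
    response, other_info = await llm_model.generate_response_async(prompt="需要调用哪些工具?", tools=tools)
    # 解析LLM响应: the length-3 check mirrors the diff; the element names are guesses.
    if len(other_info) == 3:
        _reasoning, _model_name, tool_calls = other_info
        print(response, tool_calls)


asyncio.run(main())
```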
@@ -218,6 +211,7 @@ class ToolExecutor:
             str: 缓存键
         """
         import hashlib
+
         # 使用消息内容和群聊状态生成唯一缓存键
         content = f"{target_message}_{chat_history}_{sender}"
         return hashlib.md5(content.encode()).hexdigest()
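The cache key logic in this hunk is self-contained enough to restate directly: concatenate the message, the chat history, and the sender, then take the MD5 hex digest. Only the standalone function name below is invented.

```python
import hashlib


def generate_cache_key(target_message: str, chat_history: list, sender: str) -> str:
    # 使用消息内容和群聊状态生成唯一缓存键 (same concatenation as in the hunk above)
    content = f"{target_message}_{chat_history}_{sender}"
    return hashlib.md5(content.encode()).hexdigest()


print(generate_cache_key("你好", ["hi", "hello"], "alice"))
```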
@@ -256,11 +250,7 @@ class ToolExecutor:
         if not self.enable_cache:
             return

-        self.tool_cache[cache_key] = {
-            "result": result,
-            "ttl": self.cache_ttl,
-            "timestamp": time.time()
-        }
+        self.tool_cache[cache_key] = {"result": result, "ttl": self.cache_ttl, "timestamp": time.time()}
         logger.debug(f"{self.log_prefix}设置缓存TTL: {self.cache_ttl}")

     def _cleanup_expired_cache(self):
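The hunk above fixes the cache entry shape to {"result", "ttl", "timestamp"}. Below is a minimal sketch of a TTL cache built around that shape; the entry layout and the enable_cache guard come from the diff, while the lookup and cleanup logic are assumptions about what _cleanup_expired_cache might do.

```python
import time


class TTLCacheSketch:
    def __init__(self, enable_cache: bool = True, cache_ttl: int = 60):
        self.enable_cache = enable_cache
        self.cache_ttl = cache_ttl
        self.tool_cache = {}

    def set(self, cache_key: str, result):
        if not self.enable_cache:
            return
        # Same entry shape as in the hunk: result plus its TTL and creation time.
        self.tool_cache[cache_key] = {"result": result, "ttl": self.cache_ttl, "timestamp": time.time()}

    def get(self, cache_key: str):
        # Assumed lookup: return the cached result only while it is still fresh.
        entry = self.tool_cache.get(cache_key)
        if entry and time.time() - entry["timestamp"] < entry["ttl"]:
            return entry["result"]
        return None

    def cleanup_expired(self):
        # Assumed behaviour of _cleanup_expired_cache: drop entries whose TTL has elapsed.
        now = time.time()
        expired = [k for k, v in self.tool_cache.items() if now - v["timestamp"] >= v["ttl"]]
        for key in expired:
            del self.tool_cache[key]


cache = TTLCacheSketch(cache_ttl=1)
cache.set("abc", [{"name": "demo_tool", "content": "..."}])
print(cache.get("abc"))
time.sleep(1.1)
cache.cleanup_expired()
print(cache.get("abc"))  # None after expiry
```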
@@ -289,10 +279,7 @@ class ToolExecutor:
         return [tool.get("function", {}).get("name", "unknown") for tool in tools]

     async def execute_specific_tool(
-        self,
-        tool_name: str,
-        tool_args: Dict,
-        validate_args: bool = True
+        self, tool_name: str, tool_args: Dict, validate_args: bool = True
     ) -> Optional[Dict]:
         """直接执行指定工具
@@ -305,10 +292,7 @@ class ToolExecutor:
             Optional[Dict]: 工具执行结果失败时返回None
         """
         try:
-            tool_call = {
-                "name": tool_name,
-                "arguments": tool_args
-            }
+            tool_call = {"name": tool_name, "arguments": tool_args}

             logger.info(f"{self.log_prefix}直接执行工具: {tool_name}")
@@ -360,7 +344,7 @@ class ToolExecutor:
"enabled": True, "enabled": True,
"cache_count": total_count, "cache_count": total_count,
"cache_ttl": self.cache_ttl, "cache_ttl": self.cache_ttl,
"ttl_distribution": ttl_distribution "ttl_distribution": ttl_distribution,
} }
def set_cache_config(self, enable_cache: bool = None, cache_ttl: int = None): def set_cache_config(self, enable_cache: bool = None, cache_ttl: int = None):
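The last hunk adds a trailing comma to the stats dict, which also documents its shape: enabled, cache_count, cache_ttl, and ttl_distribution. A sketch of a matching stats/config pair follows; how ttl_distribution is actually computed is not visible here, so the bucketing below is only a guess.

```python
import time


class CacheStatsSketch:
    def __init__(self, enable_cache: bool = True, cache_ttl: int = 60):
        self.enable_cache = enable_cache
        self.cache_ttl = cache_ttl
        self.tool_cache = {}

    def get_cache_stats(self) -> dict:
        if not self.enable_cache:
            return {"enabled": False}
        # Guessed bucketing: count entries by remaining lifetime.
        now = time.time()
        ttl_distribution = {"fresh": 0, "stale": 0}
        for entry in self.tool_cache.values():
            bucket = "fresh" if now - entry["timestamp"] < entry["ttl"] else "stale"
            ttl_distribution[bucket] += 1
        # Same four keys (with trailing comma) as the dict in the hunk above.
        return {
            "enabled": True,
            "cache_count": len(self.tool_cache),
            "cache_ttl": self.cache_ttl,
            "ttl_distribution": ttl_distribution,
        }

    def set_cache_config(self, enable_cache: bool = None, cache_ttl: int = None):
        # Mirrors the signature above: only overwrite the settings that were passed in.
        if enable_cache is not None:
            self.enable_cache = enable_cache
        if cache_ttl is not None:
            self.cache_ttl = cache_ttl


sketch = CacheStatsSketch()
sketch.set_cache_config(cache_ttl=120)
print(sketch.get_cache_stats())
```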