Merge branch 'dev' of https://github.com/MaiM-with-u/MaiBot into dev
@@ -215,8 +215,6 @@ class HeartFChatting:
         else:
             logger.warning(f"{self.log_prefix} 没有注册任何处理器。这可能是由于配置错误或所有处理器都被禁用了。")

-
-
     async def start(self):
         """检查是否需要启动主循环,如果未激活则启动。"""
         logger.debug(f"{self.log_prefix} 开始启动 HeartFChatting")
@@ -397,8 +395,6 @@ class HeartFChatting:
                 ("\n前处理器耗时: " + "; ".join(processor_time_strings)) if processor_time_strings else ""
             )

-
-
             logger.info(
                 f"{self.log_prefix} 第{self._current_cycle_detail.cycle_id}次思考,"
                 f"耗时: {self._current_cycle_detail.end_time - self._current_cycle_detail.start_time:.1f}秒, "
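
Note: a minimal sketch (not part of this diff) of how the "前处理器耗时" summary string above could be assembled from a name-to-seconds mapping like the processor_time_costs returned further down; the helper name and the exact format are assumptions:

    # Hypothetical helper: turn per-processor costs into the timing summary string.
    def format_processor_times(processor_time_costs: dict[str, float]) -> str:
        processor_time_strings = [f"{name}: {cost:.2f}s" for name, cost in processor_time_costs.items()]
        return ("\n前处理器耗时: " + "; ".join(processor_time_strings)) if processor_time_strings else ""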
@@ -557,8 +553,6 @@ class HeartFChatting:
         return all_plan_info, processor_time_costs

-
-
     async def _observe_process_plan_action_loop(self, cycle_timers: dict, thinking_id: str) -> dict:
         try:
             loop_start_time = time.time()

@@ -429,10 +429,7 @@ class DefaultReplyer:
         try:
             # 使用工具执行器获取信息
             tool_results = await self.tool_executor.execute_from_chat_message(
-                sender = sender,
-                target_message=text,
-                chat_history=chat_history,
-                return_details=False
+                sender=sender, target_message=text, chat_history=chat_history, return_details=False
             )

             if tool_results:
@@ -59,11 +59,7 @@ class ToolExecutor:
         logger.info(f"{self.log_prefix}工具执行器初始化完成,缓存{'启用' if enable_cache else '禁用'},TTL={cache_ttl}")

     async def execute_from_chat_message(
-        self,
-        target_message: str,
-        chat_history: list[str],
-        sender: str,
-        return_details: bool = False
+        self, target_message: str, chat_history: list[str], sender: str, return_details: bool = False
     ) -> List[Dict] | Tuple[List[Dict], List[str], str]:
         """从聊天消息执行工具

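
Note: a minimal usage sketch for the reformatted signature, written as it would appear inside an async caller such as DefaultReplyer. Only the parameter names and the List[Dict] | Tuple[List[Dict], List[str], str] return annotation come from the diff; the meaning of the tuple elements is an assumption:

    # return_details=False -> only the tool results (List[Dict])
    tool_results = await executor.execute_from_chat_message(
        target_message=text, chat_history=chat_history, sender=sender, return_details=False
    )

    # return_details=True -> assumed to also yield the used tool names and the raw LLM response
    tool_results, used_tools, llm_response = await executor.execute_from_chat_message(
        target_message=text, chat_history=chat_history, sender=sender, return_details=True
    )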
@@ -113,10 +109,7 @@ class ToolExecutor:
         logger.debug(f"{self.log_prefix}开始LLM工具调用分析")

         # 调用LLM进行工具决策
-        response, other_info = await self.llm_model.generate_response_async(
-            prompt=prompt,
-            tools=tools
-        )
+        response, other_info = await self.llm_model.generate_response_async(prompt=prompt, tools=tools)

         # 解析LLM响应
         if len(other_info) == 3:
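
Note: the tools list passed to generate_response_async appears to follow an OpenAI-style function-calling schema (a later hunk reads tool.get("function", {}).get("name", "unknown")); a hedged sketch of one entry, with a hypothetical tool name and parameters:

    tools = [
        {
            "type": "function",
            "function": {
                "name": "search_knowledge",      # hypothetical tool name
                "description": "Look up background knowledge for the reply",
                "parameters": {
                    "type": "object",
                    "properties": {"query": {"type": "string"}},
                    "required": ["query"],
                },
            },
        }
    ]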
@@ -218,6 +211,7 @@ class ToolExecutor:
             str: 缓存键
         """
         import hashlib
+
         # 使用消息内容和群聊状态生成唯一缓存键
         content = f"{target_message}_{chat_history}_{sender}"
         return hashlib.md5(content.encode()).hexdigest()
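
Note: the cache-key recipe in the hunk above can be reproduced in isolation; a minimal standalone sketch using the same logic (function name is an assumption):

    import hashlib

    def build_cache_key(target_message: str, chat_history: list[str], sender: str) -> str:
        # Same recipe as ToolExecutor: hash message, history and sender into one stable key.
        content = f"{target_message}_{chat_history}_{sender}"
        return hashlib.md5(content.encode()).hexdigest()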
@@ -256,11 +250,7 @@ class ToolExecutor:
         if not self.enable_cache:
             return

-        self.tool_cache[cache_key] = {
-            "result": result,
-            "ttl": self.cache_ttl,
-            "timestamp": time.time()
-        }
+        self.tool_cache[cache_key] = {"result": result, "ttl": self.cache_ttl, "timestamp": time.time()}
         logger.debug(f"{self.log_prefix}设置缓存,TTL: {self.cache_ttl}")

     def _cleanup_expired_cache(self):
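
Note: each cache entry stored above carries result, ttl and timestamp; a hedged sketch of the matching read/expiry side (the actual lookup and _cleanup_expired_cache bodies are not part of this diff):

    import time

    def get_cached_result(tool_cache: dict, cache_key: str):
        entry = tool_cache.get(cache_key)
        if entry is None:
            return None
        # Entry layout from the hunk above: {"result": ..., "ttl": ..., "timestamp": ...}
        if time.time() - entry["timestamp"] > entry["ttl"]:
            tool_cache.pop(cache_key, None)  # expired
            return None
        return entry["result"]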
@@ -289,10 +279,7 @@ class ToolExecutor:
         return [tool.get("function", {}).get("name", "unknown") for tool in tools]

     async def execute_specific_tool(
-        self,
-        tool_name: str,
-        tool_args: Dict,
-        validate_args: bool = True
+        self, tool_name: str, tool_args: Dict, validate_args: bool = True
     ) -> Optional[Dict]:
         """直接执行指定工具

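
Note: a minimal call sketch for the reformatted execute_specific_tool signature, inside an async caller; the tool name and arguments are placeholders, and only "returns None on failure" comes from the docstring below:

    result = await executor.execute_specific_tool(
        tool_name="search_knowledge",   # hypothetical tool
        tool_args={"query": "MaiBot"},
        validate_args=True,
    )
    if result is None:
        logger.warning("tool execution failed")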
@@ -305,10 +292,7 @@ class ToolExecutor:
             Optional[Dict]: 工具执行结果,失败时返回None
         """
         try:
-            tool_call = {
-                "name": tool_name,
-                "arguments": tool_args
-            }
+            tool_call = {"name": tool_name, "arguments": tool_args}

             logger.info(f"{self.log_prefix}直接执行工具: {tool_name}")

@@ -360,7 +344,7 @@ class ToolExecutor:
             "enabled": True,
             "cache_count": total_count,
             "cache_ttl": self.cache_ttl,
-            "ttl_distribution": ttl_distribution
+            "ttl_distribution": ttl_distribution,
         }

     def set_cache_config(self, enable_cache: bool = None, cache_ttl: int = None):
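
Note: set_cache_config takes None defaults, which suggests "update only the options that were passed"; a hedged sketch of that pattern (the method body is not shown in this diff):

    def set_cache_config(self, enable_cache: bool = None, cache_ttl: int = None):
        # Only touch settings the caller explicitly provided.
        if enable_cache is not None:
            self.enable_cache = enable_cache
        if cache_ttl is not None:
            self.cache_ttl = cache_ttl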