🤖 Auto-format code [skip ci]
@@ -28,7 +28,6 @@ from src.chat.focus_chat.planners.action_manager import ActionManager
 from src.config.config import global_config
 from src.chat.focus_chat.hfc_performance_logger import HFCPerformanceLogger
 from src.chat.focus_chat.hfc_version_manager import get_hfc_version
 from src.chat.focus_chat.info.structured_info import StructuredInfo
 from src.person_info.relationship_builder_manager import relationship_builder_manager
 
 
@@ -218,8 +217,6 @@ class HeartFChatting:
         else:
             logger.warning(f"{self.log_prefix} 没有注册任何处理器。这可能是由于配置错误或所有处理器都被禁用了。")
-
-
 
     async def start(self):
         """检查是否需要启动主循环,如果未激活则启动。"""
         logger.debug(f"{self.log_prefix} 开始启动 HeartFChatting")
@@ -400,8 +397,6 @@ class HeartFChatting:
                 ("\n前处理器耗时: " + "; ".join(processor_time_strings)) if processor_time_strings else ""
             )
-
-
 
             logger.info(
                 f"{self.log_prefix} 第{self._current_cycle_detail.cycle_id}次思考,"
                 f"耗时: {self._current_cycle_detail.end_time - self._current_cycle_detail.start_time:.1f}秒, "
@@ -560,8 +555,6 @@ class HeartFChatting:
 
         return all_plan_info, processor_time_costs
-
-
 
     async def _observe_process_plan_action_loop(self, cycle_timers: dict, thinking_id: str) -> dict:
        try:
            loop_start_time = time.time()
@@ -163,14 +163,7 @@ class DefaultReplyer:
 
         self.heart_fc_sender = HeartFCSender()
         self.memory_activator = MemoryActivator()
-        self.tool_executor = ToolExecutor(
-            chat_id=self.chat_stream.stream_id,
-            enable_cache=True,
-            cache_ttl=3
-        )
-
-
-
+        self.tool_executor = ToolExecutor(chat_id=self.chat_stream.stream_id, enable_cache=True, cache_ttl=3)
 
     def _select_weighted_model_config(self) -> Dict[str, Any]:
         """使用加权随机选择来挑选一个模型配置"""
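
For reference, a minimal usage sketch of the collapsed constructor call above; it is not part of this commit, and the import path, chat_id value, and message text are hypothetical placeholders (the real module path is not shown in this diff).

    import asyncio

    from tool_executor import ToolExecutor  # hypothetical import path, not shown in this diff

    async def main():
        # Same one-line constructor form the formatter produced above.
        executor = ToolExecutor(chat_id="demo_stream", enable_cache=True, cache_ttl=3)

        # With enable_cache=True, an identical repeat call inside the TTL window
        # should be served from executor.tool_cache (see the caching hunks below).
        results = await executor.execute_from_chat_message(
            target_message="What's the weather tomorrow?", chat_history=[], sender="Alice"
        )
        print(results)

    asyncio.run(main())
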
@@ -425,10 +418,7 @@ class DefaultReplyer:
         try:
             # 使用工具执行器获取信息
             tool_results = await self.tool_executor.execute_from_chat_message(
-                sender = sender,
-                target_message=text,
-                chat_history=chat_history,
-                return_details=False
+                sender=sender, target_message=text, chat_history=chat_history, return_details=False
             )
 
             if tool_results:
@@ -2,7 +2,6 @@ from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 import time
 from src.common.logger import get_logger
 from src.individuality.individuality import get_individuality
 from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
 from src.tools.tool_use import ToolUser
 from src.chat.utils.json_utils import process_llm_tool_calls
@@ -60,11 +59,7 @@ class ToolExecutor:
         logger.info(f"{self.log_prefix}工具执行器初始化完成,缓存{'启用' if enable_cache else '禁用'},TTL={cache_ttl}")
 
     async def execute_from_chat_message(
-        self,
-        target_message: str,
-        chat_history: list[str],
-        sender: str,
-        return_details: bool = False
+        self, target_message: str, chat_history: list[str], sender: str, return_details: bool = False
     ) -> List[Dict] | Tuple[List[Dict], List[str], str]:
         """从聊天消息执行工具
 
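
The reflowed signature above declares List[Dict] | Tuple[List[Dict], List[str], str], so the return shape depends on return_details. A hedged handling sketch, not from this commit: the executor instance and message values are placeholders, and the meaning of the tuple members is only inferred from the annotation.

    from typing import Any

    async def run_tools(executor: Any):
        # Default: a list of tool-result dicts.
        results = await executor.execute_from_chat_message(
            target_message="Check the weather", chat_history=["earlier trip planning"], sender="Alice"
        )

        # return_details=True: the annotation suggests a (results, used_tool_names, extra_text)
        # style tuple; the exact meaning of the extra items is not visible in this diff.
        detailed = await executor.execute_from_chat_message(
            target_message="Check the weather", chat_history=[], sender="Alice", return_details=True
        )
        if isinstance(detailed, tuple):
            results, tool_names, extra = detailed
        return results
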
@@ -114,10 +109,7 @@ class ToolExecutor:
         logger.debug(f"{self.log_prefix}开始LLM工具调用分析")
 
         # 调用LLM进行工具决策
-        response, other_info = await self.llm_model.generate_response_async(
-            prompt=prompt,
-            tools=tools
-        )
+        response, other_info = await self.llm_model.generate_response_async(prompt=prompt, tools=tools)
 
         # 解析LLM响应
         if len(other_info) == 3:
@@ -219,6 +211,7 @@ class ToolExecutor:
             str: 缓存键
         """
         import hashlib
+
         # 使用消息内容和群聊状态生成唯一缓存键
         content = f"{target_message}_{chat_history}_{sender}"
         return hashlib.md5(content.encode()).hexdigest()
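
The cache key above is just an MD5 digest over the interpolated message, history, and sender. A standalone illustration with made-up values:

    import hashlib

    target_message = "What's the weather tomorrow?"
    chat_history = ["user: hi", "bot: hello"]
    sender = "Alice"

    # Same scheme as the key-generation code above: the history list is interpolated
    # via its repr(), so any change in message, history, or sender yields a new key.
    content = f"{target_message}_{chat_history}_{sender}"
    cache_key = hashlib.md5(content.encode()).hexdigest()
    print(cache_key)  # 32-character hex digest used as the tool_cache key
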
@@ -257,11 +250,7 @@ class ToolExecutor:
         if not self.enable_cache:
             return
 
-        self.tool_cache[cache_key] = {
-            "result": result,
-            "ttl": self.cache_ttl,
-            "timestamp": time.time()
-        }
+        self.tool_cache[cache_key] = {"result": result, "ttl": self.cache_ttl, "timestamp": time.time()}
         logger.debug(f"{self.log_prefix}设置缓存,TTL: {self.cache_ttl}")
 
     def _cleanup_expired_cache(self):
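
Each cache entry above stores result, ttl, and timestamp. The body of _cleanup_expired_cache is not shown in this diff, so the expiry check below is only an illustration of that entry layout, assuming ttl is measured in seconds:

    import time

    # A toy cache with one stale entry (stored 10 seconds ago, TTL of 3).
    tool_cache = {
        "demo_key": {"result": [{"name": "demo_tool"}], "ttl": 3, "timestamp": time.time() - 10},
    }

    now = time.time()
    expired = [key for key, entry in tool_cache.items() if now - entry["timestamp"] > entry["ttl"]]
    for key in expired:
        del tool_cache[key]  # drop entries whose TTL window has passed
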
@@ -290,10 +279,7 @@ class ToolExecutor:
         return [tool.get("function", {}).get("name", "unknown") for tool in tools]
 
     async def execute_specific_tool(
-        self,
-        tool_name: str,
-        tool_args: Dict,
-        validate_args: bool = True
+        self, tool_name: str, tool_args: Dict, validate_args: bool = True
     ) -> Optional[Dict]:
         """直接执行指定工具
 
@@ -306,10 +292,7 @@ class ToolExecutor:
             Optional[Dict]: 工具执行结果,失败时返回None
         """
         try:
-            tool_call = {
-                "name": tool_name,
-                "arguments": tool_args
-            }
+            tool_call = {"name": tool_name, "arguments": tool_args}
 
             logger.info(f"{self.log_prefix}直接执行工具: {tool_name}")
 
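
The collapsed tool_call dict above feeds the direct-execution path. A hedged sketch of calling execute_specific_tool; the tool name and arguments are hypothetical and not defined anywhere in this diff, and executor stands for an already-constructed ToolExecutor.

    from typing import Any, Dict, Optional

    async def run_single_tool(executor: Any) -> Optional[Dict]:
        # Per the docstring above, returns a result dict on success or None on failure.
        return await executor.execute_specific_tool(
            tool_name="web_search", tool_args={"query": "today's weather"}, validate_args=True
        )
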
@@ -361,7 +344,7 @@ class ToolExecutor:
             "enabled": True,
             "cache_count": total_count,
             "cache_ttl": self.cache_ttl,
-            "ttl_distribution": ttl_distribution
+            "ttl_distribution": ttl_distribution,
         }
 
     def set_cache_config(self, enable_cache: bool = None, cache_ttl: int = None):