fix:V3哈气

This commit is contained in:
SengokuCola
2025-04-28 01:31:59 +08:00
parent 412efe79d2
commit c77f468dfc
3 changed files with 14 additions and 2 deletions

View File

@@ -8,8 +8,8 @@ from src.plugins.moods.moods import MoodManager
logger = get_logger("mai_state")
enable_unlimited_hfc_chat = True
# enable_unlimited_hfc_chat = False
class MaiState(enum.Enum):

View File

@@ -762,9 +762,15 @@ class HeartFChatting:
# 执行LLM请求
try:
print("prompt")
print("prompt")
print("prompt")
print(payload)
print(prompt)
response = await self.planner_llm._execute_request(
endpoint="/chat/completions", payload=payload, prompt=prompt
)
print(response)
except Exception as req_e:
logger.error(f"{self.log_prefix}[Planner] LLM请求执行失败: {req_e}")
return {
@@ -779,6 +785,9 @@ class HeartFChatting:
# 处理LLM响应
with Timer("使用工具", cycle_timers):
# 使用辅助函数处理工具调用响应
print(1111122222222222)
print(response)
success, arguments, error_msg = process_llm_tool_response(
response, expected_tool_name="decide_reply_action", log_prefix=f"{self.log_prefix}[Planner] "
)

View File

@@ -213,6 +213,9 @@ def process_llm_tool_calls(response: List[Any], log_prefix: str = "") -> Tuple[b
元组 (成功标志, 工具调用列表, 错误消息)
"""
# 确保响应格式正确
print(response)
print(11111111111111111)
if len(response) != 3:
return False, [], f"LLM响应元素数量不正确: 预期3个元素实际{len(response)}"