Raise on empty response

UnCLAS-Prommer
2025-08-06 13:06:53 +08:00
parent cc3d910cf6
commit 3d98b56c15
2 changed files with 5 additions and 2 deletions


@@ -138,6 +138,7 @@ class LLMRequest:
         temperature: Optional[float] = None,
         max_tokens: Optional[int] = None,
         tools: Optional[List[Dict[str, Any]]] = None,
+        raise_when_empty: bool = True,
     ) -> Tuple[str, Tuple[str, str, Optional[List[ToolCall]]]]:
         """
         异步生成响应
@@ -183,7 +184,9 @@ class LLMRequest:
             endpoint="/chat/completions",
         )
         if not content:
-            logger.warning("生成的响应为空")
+            if raise_when_empty:
+                logger.warning("生成的响应为空")
+                raise RuntimeError("生成的响应为空")
             content = "生成的响应为空,请检查模型配置或输入内容是否正确"
         return content, (reasoning_content, model_info.name, tool_calls)
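
Below is a minimal caller-side sketch of how the new raise_when_empty parameter might be used; only generate_response_async, raise_when_empty, and the RuntimeError from the hunk above come from this commit, while the llm argument, the function names, and the prompt handling are hypothetical placeholders for illustration.

# Hypothetical sketch: `llm` stands in for an LLMRequest instance; only
# generate_response_async / raise_when_empty / RuntimeError reflect the commit.

async def generate_or_default(llm, prompt: str) -> str:
    try:
        # Default after this commit: an empty completion raises instead of
        # silently returning a fallback string.
        content, (_reasoning, _model_name, _tool_calls) = await llm.generate_response_async(
            prompt=prompt,
        )
        return content
    except RuntimeError:
        # Callers that relied on the old fallback text must now catch the
        # error and decide what to do (retry, substitute a default, etc.).
        return ""

async def generate_lenient(llm, prompt: str) -> str:
    # Opting out keeps the fallback-string behaviour instead of raising.
    content, _meta = await llm.generate_response_async(prompt=prompt, raise_when_empty=False)
    return content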


@@ -111,7 +111,7 @@ class ToolExecutor:
         # 调用LLM进行工具决策
         response, (reasoning_content, model_name, tool_calls) = await self.llm_model.generate_response_async(
-            prompt=prompt, tools=tools
+            prompt=prompt, tools=tools, raise_when_empty=False
         )
         # 执行工具调用
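
Presumably the tool-decision call opts out via raise_when_empty=False because an empty decision response simply means no tool needs to run, letting the executor fall through to an empty tool_calls list instead of treating it as a hard failure; the diff only shows the flag being passed, not that reasoning.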