From bbebca832f665d7875cdf955a67cc62fec245d70 Mon Sep 17 00:00:00 2001
From: Furina-1013-create <189647097+Furina-1013-create@users.noreply.github.com>
Date: Fri, 29 Aug 2025 12:09:27 +0800
Subject: [PATCH] Fix the log-flooding issue that a certain someone won't fix
 (not saying who)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/llm_models/utils_model.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/llm_models/utils_model.py b/src/llm_models/utils_model.py
index 01859d257..8341653ff 100644
--- a/src/llm_models/utils_model.py
+++ b/src/llm_models/utils_model.py
@@ -294,7 +294,7 @@ class LLMRequest:
         for model_info, api_provider, client in model_scheduler:
             start_time = time.time()
             model_name = model_info.name
-            logger.info(f"正在尝试使用模型: {model_name}")
+            logger.debug(f"正在尝试使用模型: {model_name}")  # no flooding the log
 
             try:
                 # 检查是否启用反截断
@@ -370,7 +370,7 @@ class LLMRequest:
                         raise RuntimeError("生成空回复")
                     content = "生成的响应为空"
 
-                logger.info(f"模型 '{model_name}' 成功生成回复。")
+                logger.debug(f"模型 '{model_name}' 成功生成回复。")  # no flooding here either
                 return content, (reasoning_content, model_name, tool_calls)
             except RespNotOkException as e:
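
Note on why demoting these two calls from info to debug stops the flooding: under normal
operation the application log level is INFO or higher, so debug records are filtered out
before they reach any handler, while anyone chasing a model-selection problem can still
re-enable them by lowering the level. The sketch below illustrates that mechanism with
Python's standard logging module only; it is not taken from the patched code. The `logger`
imported in utils_model.py may be loguru or a project-specific wrapper, and the logger name
used here is made up.

    import logging

    # Typical production setup: app-wide level INFO, so logger.debug(...) is dropped.
    logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s")
    logger = logging.getLogger("llm_request_demo")  # hypothetical name, not the project's

    logger.debug("正在尝试使用模型: %s", "model-a")   # suppressed at INFO: no per-attempt spam
    logger.info("request finished")                  # still visible

    # When debugging model selection, opt back in:
    logger.setLevel(logging.DEBUG)
    logger.debug("模型 '%s' 成功生成回复。", "model-a")  # now visible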