Merge branch 'master' of https://github.com/MaiBot-Plus/MaiMbot-Pro-Max
@@ -522,6 +522,11 @@ class OpenaiClient(BaseClient):
         except APIStatusError as e:
             # Re-wrap APIError as RespNotOkException
             raise RespNotOkException(e.status_code) from e
+        except Exception as e:
+            # Add generic exception handling and logging
+            logger.error(f"Unknown error while fetching embedding: {str(e)}")
+            logger.error(f"Error type: {type(e)}")
+            raise
 
         response = APIResponse()
 
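The new catch-all branch above logs unexpected failures during an embedding request and then re-raises them unchanged. Below is a minimal, self-contained sketch of that shape, not the project's actual code: get_embedding_impl, the client argument, and the RespNotOkException stub are illustrative stand-ins; only the exception-handling structure mirrors the hunk.

import logging

from openai import APIStatusError

logger = logging.getLogger(__name__)


class RespNotOkException(Exception):
    """Stand-in for the project's non-OK-response exception."""

    def __init__(self, status_code: int):
        super().__init__(f"non-OK response: {status_code}")
        self.status_code = status_code


async def get_embedding_impl(client, model: str, text: str) -> list[float]:
    """Hypothetical embedding call wrapped with the same handlers as the hunk above."""
    try:
        result = await client.embeddings.create(model=model, input=text)
    except APIStatusError as e:
        # Known HTTP errors are re-wrapped so callers only deal with RespNotOkException
        raise RespNotOkException(e.status_code) from e
    except Exception as e:
        # Anything unexpected is logged, then re-raised unchanged for the caller
        logger.error(f"Unknown error while fetching embedding: {str(e)}")
        logger.error(f"Error type: {type(e)}")
        raise
    return result.data[0].embedding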
@@ -516,7 +516,8 @@ class LLMRequest:
                 wait_interval, compressed_messages = self._default_exception_handler(
                     e,
                     self.task_name,
-                    model_name=model_info.name,
+                    model_info=model_info,
+                    api_provider=api_provider,
                     remain_try=retry_remain,
                     retry_interval=api_provider.retry_interval,
                     messages=(message_list, compressed_messages is not None) if message_list else None,
@@ -539,7 +540,8 @@ class LLMRequest:
         self,
         e: Exception,
         task_name: str,
-        model_name: str,
+        model_info: ModelInfo,
+        api_provider: APIProvider,
         remain_try: int,
         retry_interval: int = 10,
         messages: Tuple[List[Message], bool] | None = None,
@@ -549,13 +551,15 @@ class LLMRequest:
         Args:
             e (Exception): exception object
             task_name (str): task name
-            model_name (str): model name
+            model_info (ModelInfo): model info
+            api_provider (APIProvider): API provider
             remain_try (int): remaining attempts
             retry_interval (int): retry interval
             messages (tuple[list[Message], bool] | None): (message list, whether already compressed)
         Returns:
             (wait interval (0 means no wait, -1 means stop requesting this model), new message list (for compressed messages))
         """
+        model_name = model_info.name
 
         if isinstance(e, NetworkConnectionError):  # network connection error
             return self._check_retry(
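The docstring above defines the handler's return contract: a wait interval of 0 means retry immediately, -1 means stop requesting this model, any positive value is a back-off in seconds, and a non-None message list replaces the original (compressed) prompt. A minimal sketch of a caller loop that honours that contract; send_request and handler are hypothetical callables with a deliberately simplified signature:

import asyncio


async def request_with_retries(send_request, handler, messages, max_tries: int = 3):
    """Retry loop driven by the (wait_interval, compressed_messages) contract."""
    retry_remain = max_tries
    while retry_remain > 0:
        try:
            return await send_request(messages)
        except Exception as e:
            retry_remain -= 1
            wait_interval, compressed_messages = handler(e, remain_try=retry_remain)
            if wait_interval == -1:
                break  # give up on this model entirely
            if compressed_messages is not None:
                messages = compressed_messages  # retry with the compressed prompt
            if wait_interval > 0:
                await asyncio.sleep(wait_interval)  # back off before retrying
            # wait_interval == 0: retry immediately on the next loop iteration
    return None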
@@ -571,7 +575,8 @@ class LLMRequest:
             return self._handle_resp_not_ok(
                 e,
                 task_name,
-                model_name,
+                model_info,
+                api_provider,
                 remain_try,
                 retry_interval,
                 messages,
@@ -622,7 +627,8 @@ class LLMRequest:
         self,
         e: RespNotOkException,
         task_name: str,
-        model_name: str,
+        model_info: ModelInfo,
+        api_provider: APIProvider,
         remain_try: int,
         retry_interval: int = 10,
         messages: tuple[list[Message], bool] | None = None,
@@ -632,7 +638,8 @@ class LLMRequest:
         Args:
             e (RespNotOkException): response-error exception object
             task_name (str): task name
-            model_name (str): model name
+            model_info (ModelInfo): model info
+            api_provider (APIProvider): API provider
             remain_try (int): remaining attempts
             retry_interval (int): retry interval
             messages (tuple[list[Message], bool] | None): (message list, whether already compressed)
@@ -641,6 +648,23 @@ class LLMRequest:
         """
         # Response error
         if e.status_code in [400, 401, 402, 403, 404]:
+            model_name = model_info.name
+            if (
+                e.status_code == 403
+                and model_name.startswith("Pro/deepseek-ai")
+                and api_provider.base_url == "https://api.siliconflow.cn/v1/"
+            ):
+                old_model_name = model_name
+                new_model_name = model_name[4:]
+                model_info.name = new_model_name
+                logger.warning(f"403 error detected, downgrading model from {old_model_name} to {new_model_name}")
+                # Update the model list in the task config
+                for i, m_name in enumerate(self.model_for_task.model_list):
+                    if m_name == old_model_name:
+                        self.model_for_task.model_list[i] = new_model_name
+                        logger.warning(f"Temporarily downgrading {old_model_name} to {new_model_name} in the model list of task {self.task_name}")
+                        break
+                return 0, None  # retry immediately
             # Client error
             logger.warning(
                 f"Task '{task_name}' model '{model_name}': request failed, status code {e.status_code}, message {e.message}"
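The new 403 branch above downgrades a SiliconFlow "Pro/deepseek-ai/..." model to its non-Pro counterpart by slicing off the leading four characters ("Pro/"), patching both model_info.name and the task's model list, and retrying immediately. A minimal sketch of just the name-downgrade step; the model id "Pro/deepseek-ai/DeepSeek-V3" is only an example:

def downgrade_pro_model(model_name: str) -> str:
    """Strip the "Pro/" prefix from SiliconFlow Pro model names, as in the hunk above."""
    if model_name.startswith("Pro/deepseek-ai"):
        return model_name[4:]  # drop the leading "Pro/"
    return model_name


assert downgrade_pro_model("Pro/deepseek-ai/DeepSeek-V3") == "deepseek-ai/DeepSeek-V3"
assert downgrade_pro_model("deepseek-ai/DeepSeek-V3") == "deepseek-ai/DeepSeek-V3"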