This commit is contained in:
春河晴
2025-06-05 15:52:28 +09:00
parent d4a76e094f
commit 3288051b42
3 changed files with 24 additions and 10 deletions

View File

@@ -0,0 +1,13 @@
import ssl
import certifi
import aiohttp
# SSL context that validates against certifi's bundled CA store rather than
# the (possibly missing/outdated) system certificate store.
ssl_context = ssl.create_default_context(cafile=certifi.where())
# Process-wide shared TCPConnector, lazily created by get_tcp_connector().
connector = None
async def get_tcp_connector():
    """Return a shared aiohttp.TCPConnector configured with certifi's CA bundle.

    The connector is created lazily on first use. It is also recreated if the
    cached one has been closed: aiohttp.ClientSession owns the connector it is
    given by default (connector_owner=True) and closes it when the session
    exits, after which the connector cannot serve new requests.

    Returns:
        aiohttp.TCPConnector: a connector that is open and safe to pass to a
        new ClientSession.
    """
    global connector
    # NOTE(review): callers in this commit do ClientSession(connector=...)
    # without connector_owner=False, so the first session to close also closes
    # this shared connector. Recreating a closed connector keeps later
    # requests working instead of raising "Connector is closed".
    if connector is None or connector.closed:
        connector = aiohttp.TCPConnector(ssl=ssl_context)
    return connector

View File

@@ -6,6 +6,7 @@ from typing import Tuple, Union
import aiohttp
import requests
from src.common.logger import get_module_logger
from src.common.tcp_connector import get_tcp_connector
from rich.traceback import install
install(extra_lines=3)
@@ -94,7 +95,7 @@ class LLMRequestOff:
max_retries = 3
base_wait_time = 15
async with aiohttp.ClientSession() as session:
async with aiohttp.ClientSession(connector=await get_tcp_connector()) as session:
for retry in range(max_retries):
try:
async with session.post(api_url, headers=headers, json=data) as response:

View File

@@ -13,6 +13,7 @@ import os
from src.common.database.database import db # 确保 db 被导入用于 create_tables
from src.common.database.database_model import LLMUsage # 导入 LLMUsage 模型
from src.config.config import global_config
from src.common.tcp_connector import get_tcp_connector
from rich.traceback import install
install(extra_lines=3)
@@ -264,7 +265,6 @@ class LLMRequest:
if self.model_name.lower() in self.MODELS_NEEDING_TRANSFORMATION and "max_tokens" in payload:
payload["max_completion_tokens"] = payload.pop("max_tokens")
return {
"policy": policy,
"payload": payload,
@@ -312,7 +312,7 @@ class LLMRequest:
# 似乎是openai流式必须要的东西,不过阿里云的qwq-plus加了这个没有影响
if request_content["stream_mode"]:
headers["Accept"] = "text/event-stream"
async with aiohttp.ClientSession() as session:
async with aiohttp.ClientSession(connector=await get_tcp_connector()) as session:
async with session.post(
request_content["api_url"], headers=headers, json=request_content["payload"]
) as response: