Merge branch 'dev' of https://github.com/MaiM-with-u/MaiBot into dev
.github/workflows/ruff.yml (vendored, 8 changes)
@@ -23,12 +23,14 @@ jobs:
         with:
           fetch-depth: 0
           ref: ${{ github.head_ref || github.ref_name }}
-      - name: Install the latest version of ruff
+      - name: Install Ruff and Run Checks
         uses: astral-sh/ruff-action@v3
         with:
           version: "latest"
-      - run: ruff check --fix
-      - run: ruff format
+      - name: Run Ruff Fix
+        run: ruff check --fix
+      - name: Run Ruff Format
+        run: ruff format
       - name: Commit changes
         if: success()
         run: |
MaiMBot-LPMM (submodule, 1 change)
Submodule MaiMBot-LPMM added at d5824d2f48
@@ -23,8 +23,8 @@ services:
     # image: infinitycat/maibot:dev
     environment:
       - TZ=Asia/Shanghai
-      # - EULA_AGREE=35362b6ea30f12891d46ef545122e84a # agree to the EULA
-      # - PRIVACY_AGREE=2402af06e133d2d10d9c6c643fdc9333 # agree to the EULA
+      # - EULA_AGREE=bda99dca873f5d8044e9987eac417e01 # agree to the EULA
+      # - PRIVACY_AGREE=42dddb3cbe2b784b45a2781407b298a1 # agree to the EULA
     # ports:
     #   - "8000:8000"
     volumes:
@@ -1076,7 +1076,7 @@ def get_module_logger(
     # file handler
     log_dir = Path(current_config["log_dir"])
     log_dir.mkdir(parents=True, exist_ok=True)
-    log_file = log_dir / module_name / "{time:YYYY-MM-DD}.log"
+    log_file = log_dir / "{time:YYYY-MM-DD}.log"
     log_file.parent.mkdir(parents=True, exist_ok=True)

     file_id = logger.add(
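The change above drops the per-module subdirectory, so every dated file lands directly under log_dir. A minimal sketch of how such a dated sink behaves, assuming the logger is loguru (suggested by logger.add and the "{time:YYYY-MM-DD}" placeholder); the directory name is illustrative:

from pathlib import Path
from loguru import logger

log_dir = Path("logs")
log_dir.mkdir(parents=True, exist_ok=True)

# loguru expands "{time:...}" in the sink path when the file is opened,
# so records written today go to e.g. logs/2024-01-31.log
log_file = log_dir / "{time:YYYY-MM-DD}.log"
file_id = logger.add(str(log_file), encoding="utf-8")

logger.info("hello")     # appended to today's file
logger.remove(file_id)   # detach the file sink again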
src/common/tcp_connector.py (new file, 9 lines)
@@ -0,0 +1,9 @@
+import ssl
+import certifi
+import aiohttp
+
+ssl_context = ssl.create_default_context(cafile=certifi.where())
+
+
+async def get_tcp_connector():
+    return aiohttp.TCPConnector(ssl=ssl_context)
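The helper is consumed by handing a fresh connector to each aiohttp session, as the hunks below do. A minimal usage sketch; the URL, headers and payload are placeholders rather than values from this repository:

import asyncio
import aiohttp

from src.common.tcp_connector import get_tcp_connector


async def post_once():
    # each ClientSession owns (and closes) its connector, so a new one is created per session
    async with aiohttp.ClientSession(connector=await get_tcp_connector()) as session:
        async with session.post("https://example.invalid/api", headers={}, json={}) as response:
            return await response.json()


asyncio.run(post_once())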
@@ -6,6 +6,7 @@ from typing import Tuple, Union
 import aiohttp
 import requests
 from src.common.logger import get_module_logger
+from src.common.tcp_connector import get_tcp_connector
 from rich.traceback import install

 install(extra_lines=3)
@@ -94,7 +95,7 @@ class LLMRequestOff:
         max_retries = 3
         base_wait_time = 15

-        async with aiohttp.ClientSession() as session:
+        async with aiohttp.ClientSession(connector=await get_tcp_connector()) as session:
             for retry in range(max_retries):
                 try:
                     async with session.post(api_url, headers=headers, json=data) as response:
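Only the session line changes here; most of the retry loop sits outside the hunk. A sketch of the overall pattern, where the exponential backoff (base_wait_time * 2**retry) and the exception handling are assumptions and only max_retries, base_wait_time and the for/try structure come from the diff:

import asyncio
import aiohttp

from src.common.tcp_connector import get_tcp_connector


async def post_with_retries(api_url: str, headers: dict, data: dict):
    max_retries = 3
    base_wait_time = 15

    async with aiohttp.ClientSession(connector=await get_tcp_connector()) as session:
        for retry in range(max_retries):
            try:
                async with session.post(api_url, headers=headers, json=data) as response:
                    response.raise_for_status()
                    return await response.json()
            except aiohttp.ClientError:
                if retry == max_retries - 1:
                    raise
                # assumed backoff schedule: 15s, 30s, 60s, ...
                await asyncio.sleep(base_wait_time * 2**retry)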
@@ -13,6 +13,7 @@ import os
 from src.common.database.database import db  # make sure db is imported for create_tables
 from src.common.database.database_model import LLMUsage  # import the LLMUsage model
 from src.config.config import global_config
+from src.common.tcp_connector import get_tcp_connector
 from rich.traceback import install

 install(extra_lines=3)
@@ -264,7 +265,6 @@ class LLMRequest:
         if self.model_name.lower() in self.MODELS_NEEDING_TRANSFORMATION and "max_tokens" in payload:
             payload["max_completion_tokens"] = payload.pop("max_tokens")

-
         return {
             "policy": policy,
             "payload": payload,
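For reference, the kept lines rewrite max_tokens into max_completion_tokens for models listed in MODELS_NEEDING_TRANSFORMATION. A small illustration with a hypothetical model name; only the pop/reassign pattern comes from the diff:

MODELS_NEEDING_TRANSFORMATION = {"o1-mini"}  # hypothetical entry, not from the repository

payload = {"model": "o1-mini", "messages": [], "max_tokens": 512}
model_name = payload["model"]

if model_name.lower() in MODELS_NEEDING_TRANSFORMATION and "max_tokens" in payload:
    # rename the field in place; the value is unchanged
    payload["max_completion_tokens"] = payload.pop("max_tokens")

assert payload == {"model": "o1-mini", "messages": [], "max_completion_tokens": 512}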
@@ -312,7 +312,7 @@ class LLMRequest:
         # seems to be required for OpenAI streaming; adding it for Aliyun's qwq-plus has no effect
         if request_content["stream_mode"]:
             headers["Accept"] = "text/event-stream"
-        async with aiohttp.ClientSession() as session:
+        async with aiohttp.ClientSession(connector=await get_tcp_connector()) as session:
             async with session.post(
                 request_content["api_url"], headers=headers, json=request_content["payload"]
             ) as response:
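When stream_mode is set, the request asks for text/event-stream and the same connector-backed session is used. A sketch of consuming such a stream with aiohttp; the "data: " prefix and "[DONE]" sentinel follow the common OpenAI SSE convention and are assumptions, not code from this repository:

import aiohttp

from src.common.tcp_connector import get_tcp_connector


async def stream_chat(api_url: str, headers: dict, payload: dict):
    headers = {**headers, "Accept": "text/event-stream"}
    async with aiohttp.ClientSession(connector=await get_tcp_connector()) as session:
        async with session.post(api_url, headers=headers, json=payload) as response:
            # the response body arrives as newline-delimited SSE events
            async for raw_line in response.content:
                line = raw_line.decode("utf-8").strip()
                if not line.startswith("data: "):
                    continue
                chunk = line[len("data: "):]
                if chunk == "[DONE]":
                    break
                yield chunk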