feat: 增加stream控制字段,修复qwq不能工作的问题

This commit is contained in:
Rikki
2025-03-30 05:00:11 +08:00
parent b2fc824afd
commit 362dda1ab3
3 changed files with 14 additions and 5 deletions

View File

@@ -309,13 +309,17 @@ class BotConfig:
# base_url 的例子: SILICONFLOW_BASE_URL
# key 的例子: SILICONFLOW_KEY
cfg_target = {"name": "", "base_url": "", "key": "", "pri_in": 0, "pri_out": 0}
cfg_target = {"name": "", "base_url": "", "key": "", "stream": False, "pri_in": 0, "pri_out": 0}
if config.INNER_VERSION in SpecifierSet("<=0.0.0"):
cfg_target = cfg_item
elif config.INNER_VERSION in SpecifierSet(">=0.0.1"):
stable_item = ["name", "pri_in", "pri_out"]
if config.INNER_VERSION in SpecifierSet(">=1.0.1"):
stable_item.append("stream")
pricing_item = ["pri_in", "pri_out"]
# 从配置中原始拷贝稳定字段
for i in stable_item:

View File

@@ -12,8 +12,6 @@ import io
import os
from ...common.database import db
from ..config.config import global_config
from ..config.config_env import env_config
logger = get_module_logger("model_utils")
@@ -42,6 +40,7 @@ class LLM_request:
self.model_name = model["name"]
self.params = kwargs
self.stream = model.get("stream", False)
self.pri_in = model.get("pri_in", 0)
self.pri_out = model.get("pri_out", 0)
@@ -175,7 +174,7 @@ class LLM_request:
api_url = f"{self.base_url.rstrip('/')}/{endpoint.lstrip('/')}"
# 判断是否为流式
stream_mode = self.params.get("stream", False)
stream_mode = self.stream
# logger_msg = "进入流式输出模式," if stream_mode else ""
# logger.debug(f"{logger_msg}发送请求到URL: {api_url}")
# logger.info(f"使用模型: {self.model_name}")

View File

@@ -1,5 +1,5 @@
[inner]
version = "1.0.0"
version = "1.0.1"
[mai_version]
version = "0.6.0"
@@ -149,6 +149,12 @@ enable_think_flow = false # 是否启用思维流 注意:可能会消耗大量
#下面的模型若使用硅基流动则不需要更改;使用ds官方则改成.env自定义的宏;使用自定义模型则选择定位相似的模型自己填写
#推理模型
# 额外字段
# 下面的模型有以下额外字段可以添加:
# stream = <true|false> : 用于指定模型是否使用流式输出
# 如果不指定,则该项是 False
[model.llm_reasoning] #回复模型1 主要回复模型
name = "Pro/deepseek-ai/DeepSeek-R1"
# name = "Qwen/QwQ-32B"