Merge remote-tracking branch 'upstream/debug' into debug

tcmofashi
2025-03-07 06:45:37 +08:00
5 changed files with 8 additions and 5 deletions

.gitignore

@@ -188,3 +188,7 @@ cython_debug/
+# jieba
+jieba.cache
+# vscode
+/.vscode

bot.py

@@ -17,11 +17,11 @@ print(rainbow_text)
'''Easter egg'''
# First-run check
-if not os.path.exists("config/bot_config.toml") or not os.path.exists(".env"):
+if not os.path.exists("config/bot_config.toml"):
    logger.info("Detected that bot_config.toml does not exist, copying from template")
    import shutil
-    shutil.copy("config/bot_config_template.toml", "config/bot_config.toml")
+    shutil.copy("templete/bot_config_template.toml", "config/bot_config.toml")
    logger.info("Copy complete, please edit the settings in config/bot_config.toml and .env.prod, then restart")
# Initialize .env, default ENVIRONMENT=prod
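
Taken together, the two changed lines mean the first-run check now only looks for config/bot_config.toml, and the template is copied from the repository's templete/ directory (spelled that way in the repo) instead of from config/. A minimal standalone sketch of the resulting bootstrap, using print in place of the project's logger, which is assumed to be configured elsewhere:

import os
import shutil

# First-run bootstrap as it behaves after this commit: only the TOML config is
# checked, and the template is read from the top-level "templete" directory.
if not os.path.exists("config/bot_config.toml"):
    print("bot_config.toml not found, copying from template")
    shutil.copy("templete/bot_config_template.toml", "config/bot_config.toml")
    print("Copy complete, edit config/bot_config.toml and .env.prod, then restart")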


@@ -213,12 +213,11 @@ class LLM_request:
)
return content, reasoning_content
-async def get_embedding(self, text: str, model: str = "BAAI/bge-m3") -> Union[list, None]:
+async def get_embedding(self, text: str) -> Union[list, None]:
    """Asynchronous method to get the embedding vector for a text
    Args:
        text: the text to embed
-        model: name of the model to use, defaults to "BAAI/bge-m3"
    Returns:
        list: the embedding vector, or None on failure
@@ -233,7 +232,7 @@ class LLM_request:
endpoint="/embeddings",
prompt=text,
payload={
"model": model,
"model": self.model_name,
"input": text,
"encoding_format": "float"
},
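
With this change the payload sends self.model_name, so the embedding model is fixed when the LLM_request object is constructed rather than chosen per call. A hedged usage sketch under that assumption; the constructor argument (model_name=...) and the import path are illustrations, not necessarily the project's real API:

import asyncio
# from ... import LLM_request   # import path depends on the project layout

async def main():
    # Assumed construction: the instance carries model_name, which get_embedding
    # now forwards as the "model" field of the /embeddings payload.
    llm = LLM_request(model_name="BAAI/bge-m3")
    vector = await llm.get_embedding("some text to embed")
    if vector is None:
        print("embedding request failed")  # get_embedding returns None on failure
    else:
        print(f"got an embedding of dimension {len(vector)}")

asyncio.run(main())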