From 2554f8a931bba90a3d46dd463cf05e1336dc3692 Mon Sep 17 00:00:00 2001
From: KawaiiYusora
Date: Sat, 1 Mar 2025 18:52:40 +0800
Subject: [PATCH] Fix the deepseek API support that was missed in the previous change
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/plugins/schedule/schedule_generator.py  |  5 ++++-
 src/plugins/schedule/schedule_llm_module.py | 16 ++++++++++++----
 2 files changed, 16 insertions(+), 5 deletions(-)

diff --git a/src/plugins/schedule/schedule_generator.py b/src/plugins/schedule/schedule_generator.py
index 13b6ebb88..b1d07214a 100644
--- a/src/plugins/schedule/schedule_generator.py
+++ b/src/plugins/schedule/schedule_generator.py
@@ -29,7 +29,10 @@ Database.initialize(
 
 class ScheduleGenerator:
     def __init__(self):
-        self.llm_scheduler = LLMModel(model_name="Pro/deepseek-ai/DeepSeek-V3")
+        if global_config.API_USING == "siliconflow":
+            self.llm_scheduler = LLMModel(model_name="Pro/deepseek-ai/DeepSeek-V3")
+        elif global_config.API_USING == "deepseek":
+            self.llm_scheduler = LLMModel(model_name="deepseek-chat", api_using="deepseek")
         self.db = Database.get_instance()
 
         today = datetime.datetime.now()
diff --git a/src/plugins/schedule/schedule_llm_module.py b/src/plugins/schedule/schedule_llm_module.py
index 0f1e71f6c..13945afb3 100644
--- a/src/plugins/schedule/schedule_llm_module.py
+++ b/src/plugins/schedule/schedule_llm_module.py
@@ -8,11 +8,19 @@ load_dotenv()
 
 class LLMModel:
     # def __init__(self, model_name="deepseek-ai/DeepSeek-R1-Distill-Qwen-32B", **kwargs):
-    def __init__(self, model_name="Pro/deepseek-ai/DeepSeek-R1", **kwargs):
-        self.model_name = model_name
+    def __init__(self, model_name="Pro/deepseek-ai/DeepSeek-R1", api_using=None, **kwargs):
+        if api_using == "deepseek":
+            self.api_key = os.getenv("DEEPSEEK_API_KEY")
+            self.base_url = os.getenv("DEEPSEEK_BASE_URL")
+            if model_name != "Pro/deepseek-ai/DeepSeek-R1":
+                self.model_name = model_name
+            else:
+                self.model_name = "deepseek-reasoner"
+        else:
+            self.api_key = os.getenv("SILICONFLOW_KEY")
+            self.base_url = os.getenv("SILICONFLOW_BASE_URL")
+            self.model_name = model_name
         self.params = kwargs
-        self.api_key = os.getenv("SILICONFLOW_KEY")
-        self.base_url = os.getenv("SILICONFLOW_BASE_URL")
 
     def generate_response(self, prompt: str) -> Tuple[str, str]:
         """根据输入的提示生成模型的响应"""
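
A minimal usage sketch of the patched constructor, for reference only. It assumes the repository root is importable as shown by the file paths in this patch, and that the environment variables read by schedule_llm_module.py (DEEPSEEK_API_KEY / DEEPSEEK_BASE_URL or SILICONFLOW_KEY / SILICONFLOW_BASE_URL) are present in .env; the variable names on the left are illustrative, not part of the patch.

    # Sketch, not part of the patch: exercising both API paths of LLMModel.
    from src.plugins.schedule.schedule_llm_module import LLMModel

    # deepseek path with no explicit model: the constructor falls back to
    # "deepseek-reasoner" and reads DEEPSEEK_API_KEY / DEEPSEEK_BASE_URL.
    reasoner = LLMModel(api_using="deepseek")

    # deepseek path with an explicit model, as ScheduleGenerator now does
    # when global_config.API_USING == "deepseek".
    chat = LLMModel(model_name="deepseek-chat", api_using="deepseek")

    # default path: unchanged siliconflow behaviour, reading SILICONFLOW_KEY
    # and SILICONFLOW_BASE_URL.
    v3 = LLMModel(model_name="Pro/deepseek-ai/DeepSeek-V3")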