diff --git a/src/plugins/schedule/schedule_generator.py b/src/plugins/schedule/schedule_generator.py
index 13b6ebb88..b1d07214a 100644
--- a/src/plugins/schedule/schedule_generator.py
+++ b/src/plugins/schedule/schedule_generator.py
@@ -29,7 +29,10 @@ Database.initialize(
 
 class ScheduleGenerator:
     def __init__(self):
-        self.llm_scheduler = LLMModel(model_name="Pro/deepseek-ai/DeepSeek-V3")
+        if global_config.API_USING == "siliconflow":
+            self.llm_scheduler = LLMModel(model_name="Pro/deepseek-ai/DeepSeek-V3")
+        elif global_config.API_USING == "deepseek":
+            self.llm_scheduler = LLMModel(model_name="deepseek-chat", api_using="deepseek")
         self.db = Database.get_instance()
 
         today = datetime.datetime.now()
diff --git a/src/plugins/schedule/schedule_llm_module.py b/src/plugins/schedule/schedule_llm_module.py
index 0f1e71f6c..13945afb3 100644
--- a/src/plugins/schedule/schedule_llm_module.py
+++ b/src/plugins/schedule/schedule_llm_module.py
@@ -8,11 +8,19 @@ load_dotenv()
 
 class LLMModel:
     # def __init__(self, model_name="deepseek-ai/DeepSeek-R1-Distill-Qwen-32B", **kwargs):
-    def __init__(self, model_name="Pro/deepseek-ai/DeepSeek-R1", **kwargs):
-        self.model_name = model_name
+    def __init__(self, model_name="Pro/deepseek-ai/DeepSeek-R1", api_using=None, **kwargs):
+        if api_using == "deepseek":
+            self.api_key = os.getenv("DEEPSEEK_API_KEY")
+            self.base_url = os.getenv("DEEPSEEK_BASE_URL")
+            if model_name != "Pro/deepseek-ai/DeepSeek-R1":
+                self.model_name = model_name
+            else:
+                self.model_name = "deepseek-reasoner"
+        else:
+            self.api_key = os.getenv("SILICONFLOW_KEY")
+            self.base_url = os.getenv("SILICONFLOW_BASE_URL")
+            self.model_name = model_name
         self.params = kwargs
-        self.api_key = os.getenv("SILICONFLOW_KEY")
-        self.base_url = os.getenv("SILICONFLOW_BASE_URL")
 
     def generate_response(self, prompt: str) -> Tuple[str, str]:
         """根据输入的提示生成模型的响应"""
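
For context, here is a minimal, self-contained sketch of the provider switch this patch introduces. The class below only mirrors the patched `LLMModel.__init__` for illustration; it is not the repository module, and the environment variable names (`DEEPSEEK_API_KEY`, `DEEPSEEK_BASE_URL`, `SILICONFLOW_KEY`, `SILICONFLOW_BASE_URL`) are simply the ones read in the diff.

```python
import os
from typing import Optional


class LLMModel:
    """Illustrative sketch of the patched constructor logic (not the repo class)."""

    def __init__(self, model_name: str = "Pro/deepseek-ai/DeepSeek-R1",
                 api_using: Optional[str] = None, **kwargs):
        if api_using == "deepseek":
            # Official DeepSeek API: the default R1 name is mapped to "deepseek-reasoner"
            self.api_key = os.getenv("DEEPSEEK_API_KEY")
            self.base_url = os.getenv("DEEPSEEK_BASE_URL")
            self.model_name = (model_name
                               if model_name != "Pro/deepseek-ai/DeepSeek-R1"
                               else "deepseek-reasoner")
        else:
            # Default path: SiliconFlow-hosted models keep their original names
            self.api_key = os.getenv("SILICONFLOW_KEY")
            self.base_url = os.getenv("SILICONFLOW_BASE_URL")
            self.model_name = model_name
        self.params = kwargs


# What ScheduleGenerator now builds, depending on global_config.API_USING:
deepseek_llm = LLMModel(model_name="deepseek-chat", api_using="deepseek")
siliconflow_llm = LLMModel(model_name="Pro/deepseek-ai/DeepSeek-V3")
print(deepseek_llm.model_name, deepseek_llm.base_url)
print(siliconflow_llm.model_name, siliconflow_llm.base_url)
```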