diff --git a/config/bot_config_template.toml b/config/bot_config_template.toml
index f0c7d52cb..5ad837f6d 100644
--- a/config/bot_config_template.toml
+++ b/config/bot_config_template.toml
@@ -89,7 +89,7 @@ key = "SILICONFLOW_KEY"
 
 # 主题提取,jieba和snownlp不用api,llm需要api
 [topic]
-topic='llm' # 只支持jieba,snownlp,llm三种选项
+topic_extract='snownlp' # 只支持jieba,snownlp,llm三种选项
 
 [topic.llm_topic]
 name = "Pro/deepseek-ai/DeepSeek-V3"
diff --git a/src/plugins/chat/config.py b/src/plugins/chat/config.py
index 24cf12925..a74a668a1 100644
--- a/src/plugins/chat/config.py
+++ b/src/plugins/chat/config.py
@@ -41,8 +41,10 @@ class BotConfig:
     llm_normal_minor: Dict[str, str] = field(default_factory=lambda: {})
     embedding: Dict[str, str] = field(default_factory=lambda: {})
     vlm: Dict[str, str] = field(default_factory=lambda: {})
+
+    # 主题提取配置
     topic_extract: str = 'snownlp' # 只支持jieba,snownlp,llm
-    llm_topic_extract=llm_normal_minor
+    llm_topic_extract: Dict[str, str] = field(default_factory=lambda: {})
 
     API_USING: str = "siliconflow" # 使用的API
     API_PAID: bool = False # 是否使用付费API
@@ -124,6 +126,7 @@ class BotConfig:
 
         if "llm_normal" in model_config:
             config.llm_normal = model_config["llm_normal"]
+            config.llm_topic_extract = config.llm_normal
 
         if "llm_normal_minor" in model_config:
             config.llm_normal_minor = model_config["llm_normal_minor"]