diff --git a/src/chat/planner_actions/planner.py b/src/chat/planner_actions/planner.py
index 593a458e0..c2255c26b 100644
--- a/src/chat/planner_actions/planner.py
+++ b/src/chat/planner_actions/planner.py
@@ -160,9 +160,8 @@ class ActionPlanner:
             model_set=model_config.model_task_config.planner, request_type="planner"
         )
         # --- Sub-planner ("small brain", new) ---
-        # TODO: a dedicated lightweight model for planner_small can be configured in model_config.toml
         self.planner_small_llm = LLMRequest(
-            model_set=model_config.model_task_config.planner, request_type="planner_small"
+            model_set=model_config.model_task_config.planner_small, request_type="planner_small"
         )
         self.last_obs_time_mark = 0.0
@@ -496,8 +495,7 @@ class ActionPlanner:
         if sub_planner_actions:
             sub_planner_actions_num = len(sub_planner_actions)
-            # TODO: you can add planner_size = 5.0 to the [chat] section of config.toml to customize this value
-            planner_size_config = getattr(global_config.chat, "planner_size", 5.0)
+            planner_size_config = global_config.chat.planner_size
             sub_planner_size = int(planner_size_config) + (
                 1 if random.random() < planner_size_config - int(planner_size_config) else 0
             )
diff --git a/src/config/api_ada_configs.py b/src/config/api_ada_configs.py
index b74f1b558..5e53eec4b 100644
--- a/src/config/api_ada_configs.py
+++ b/src/config/api_ada_configs.py
@@ -113,6 +113,7 @@ class ModelTaskConfig(ValidatedConfigBase):
     voice: TaskConfig = Field(..., description="Speech recognition model configuration")
     tool_use: TaskConfig = Field(..., description="Focused tool-use model configuration")
     planner: TaskConfig = Field(..., description="Planner model configuration")
+    planner_small: TaskConfig = Field(..., description="Sub-planner ('small brain') planning model configuration")
     embedding: TaskConfig = Field(..., description="Embedding model configuration")
     lpmm_entity_extract: TaskConfig = Field(..., description="LPMM entity extraction model configuration")
     lpmm_rdf_build: TaskConfig = Field(..., description="LPMM RDF construction model configuration")
@@ -147,9 +148,9 @@ class ModelTaskConfig(ValidatedConfigBase):
 class APIAdapterConfig(ValidatedConfigBase):
     """API adapter configuration class"""
 
-    models: List[ModelInfo] = Field(..., min_items=1, description="Model list")
+    models: List[ModelInfo] = Field(..., min_length=1, description="Model list")
     model_task_config: ModelTaskConfig = Field(..., description="Model task configuration")
-    api_providers: List[APIProvider] = Field(..., min_items=1, description="API provider list")
+    api_providers: List[APIProvider] = Field(..., min_length=1, description="API provider list")
 
     def __init__(self, **data):
         super().__init__(**data)
diff --git a/src/config/official_configs.py b/src/config/official_configs.py
index 3989e246c..3a8b46d03 100644
--- a/src/config/official_configs.py
+++ b/src/config/official_configs.py
@@ -92,6 +92,7 @@ class ChatConfig(ValidatedConfigBase):
         default_factory=list, description="Group chats where proactive thinking is enabled, format: platform:group_id; empty means no restriction"
     )
     delta_sigma: int = Field(default=120, description="Use a normally distributed random time interval")
+    planner_size: float = Field(default=5.0, ge=1.0, description="Size of each sub-planner ('small brain'), i.e. how many actions one sub-planner handles")
 
     def get_current_talk_frequency(self, chat_stream_id: Optional[str] = None) -> float:
         """
diff --git a/template/bot_config_template.toml b/template/bot_config_template.toml
index 89c94fe86..798065b6c 100644
--- a/template/bot_config_template.toml
+++ b/template/bot_config_template.toml
@@ -179,6 +179,9 @@ delta_sigma = 120 # Standard deviation of the normal distribution; controls how random the interval is
 # Experiment suggestion: try the purely random mode of proactive_thinking_interval=0 plus a very large delta_sigma!
 # Guarantee: generated intervals are always positive (negative values take the absolute value), minimum 1 second, maximum 24 hours
+# --- Main/sub-planner ("big brain"/"small brain") configuration ---
+planner_size = 5.0 # Size of each sub-planner ("small brain"): how many actions one sub-planner handles. Smaller values give more parallelism but less context per sub-planner. Recommended range: 3.0-8.0
+
 
 [relationship]
 enable_relationship = true # Whether to enable the relationship system
 relation_frequency = 1 # Relationship frequency, i.e. how often MoFox-Bot builds relationships
diff --git a/template/model_config_template.toml b/template/model_config_template.toml
index 0c1783143..fab3ee509 100644
--- a/template/model_config_template.toml
+++ b/template/model_config_template.toml
@@ -1,5 +1,5 @@
 [inner]
-version = "1.3.0"
+version = "1.3.1"
 # Config file version numbering follows the same rules as bot_config.toml
 
@@ -142,6 +142,11 @@ model_list = ["siliconflow-deepseek-v3"]
 temperature = 0.3
 max_tokens = 800
 
+[model_task_config.planner_small] # Decision-making ("small brain"): the model that picks the concrete action; a fast, small model is recommended
+model_list = ["qwen3-30b"]
+temperature = 0.5
+max_tokens = 800
+
 [model_task_config.emotion] # Responsible for 麦麦's emotion changes
 model_list = ["siliconflow-deepseek-v3"]
 temperature = 0.3
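For reference, below is a minimal standalone sketch (not part of the patch) of what the planner_size arithmetic in the planner.py hunk above does: the integer part of the configured float is the baseline number of actions handled per sub-planner, and the fractional part is used as the probability of rounding up, so planner_size = 5.5 averages out to about 5.5 actions per sub-planner over many planning rounds. The split_actions helper is a hypothetical illustration of how such a size could be used to partition an action list; it is not code from this diff.

import random

def sub_planner_size(planner_size_config: float) -> int:
    # Stochastic rounding, mirroring the expression added in planner.py:
    # the integer part is the baseline, the fractional part is the chance of rounding up.
    base = int(planner_size_config)
    return base + (1 if random.random() < planner_size_config - base else 0)

def split_actions(actions: list, planner_size_config: float) -> list:
    # Hypothetical helper: chunk a flat action list into per-sub-planner groups.
    chunks, i = [], 0
    while i < len(actions):
        size = sub_planner_size(planner_size_config)
        chunks.append(actions[i:i + size])
        i += size
    return chunks

With planner_size = 5.0 every sub-planner receives exactly 5 actions; with 5.5, roughly half of the planning rounds produce chunks of 6 and the rest chunks of 5.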