fix: change model names, remove the chat_mind processor

SengokuCola
2025-06-03 19:43:54 +08:00
parent 51f5e610d7
commit f94f14cce2
9 changed files with 25 additions and 48 deletions


@@ -68,8 +68,7 @@ class MindProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id
         self.llm_model = LLMRequest(
-            model=global_config.model.focus_chat_mind,
-            # temperature=global_config.model.focus_chat_mind["temp"],
+            model=global_config.model.planner,
             max_tokens=800,
             request_type="focus.processor.chat_mind",
         )


@@ -49,8 +49,7 @@ class RelationshipProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id
         self.llm_model = LLMRequest(
-            model=global_config.model.focus_self_recognize,
-            temperature=global_config.model.focus_self_recognize["temp"],
+            model=global_config.model.relation,
             max_tokens=800,
             request_type="focus.processor.self_identify",
         )


@@ -51,8 +51,7 @@ class SelfProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id
         self.llm_model = LLMRequest(
-            model=global_config.model.focus_self_recognize,
-            temperature=global_config.model.focus_self_recognize["temp"],
+            model=global_config.model.relation,
             max_tokens=800,
             request_type="focus.processor.self_identify",
         )


@@ -60,8 +60,7 @@ class WorkingMemoryProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id
         self.llm_model = LLMRequest(
-            model=global_config.model.focus_chat_mind,
-            temperature=global_config.model.focus_chat_mind["temp"],
+            model=global_config.model.planner,
             max_tokens=800,
             request_type="focus.processor.working_memory",
         )


@@ -165,7 +165,7 @@ class FocusChatConfig(ConfigBase):
 class FocusChatProcessorConfig(ConfigBase):
     """Focus chat processor configuration class"""
 
-    mind_processor: bool = True
+    mind_processor: bool = False
     """Whether to enable the mind processor"""
 
     self_identify_processor: bool = True
@@ -180,9 +180,6 @@ class FocusChatProcessorConfig(ConfigBase):
     working_memory_processor: bool = True
     """Whether to enable the working memory processor"""
 
-    lite_chat_mind_processor: bool = False
-    """Whether to enable the lightweight chat mind processor, which saves token usage and time"""
-
 
 @dataclass
 class ExpressionConfig(ConfigBase):
@@ -445,11 +442,6 @@ class ModelConfig(ConfigBase):
     focus_working_memory: dict[str, Any] = field(default_factory=lambda: {})
     """Focus working memory model configuration"""
 
-    focus_chat_mind: dict[str, Any] = field(default_factory=lambda: {})
-    """Focus chat planning model configuration"""
-
-    focus_self_recognize: dict[str, Any] = field(default_factory=lambda: {})
-    """Focus self-recognition model configuration"""
 
     focus_tool_use: dict[str, Any] = field(default_factory=lambda: {})
     """Focus tool-use model configuration"""
@@ -457,6 +449,9 @@ class ModelConfig(ConfigBase):
     planner: dict[str, Any] = field(default_factory=lambda: {})
     """Planner model configuration"""
 
+    relation: dict[str, Any] = field(default_factory=lambda: {})
+    """Relationship model configuration"""
+
     focus_expressor: dict[str, Any] = field(default_factory=lambda: {})
     """Focus expressor model configuration"""


@@ -17,8 +17,8 @@ class ImpressionUpdateTask(AsyncTask):
     def __init__(self):
         super().__init__(
             task_name="impression_update",
-            wait_before_start=10,  # wait 10 seconds after startup
-            run_interval=30  # run once per minute
+            wait_before_start=2,  # wait 10 seconds after startup
+            run_interval=20  # run once per minute
         )
 
     async def run(self):
@@ -27,7 +27,7 @@ class ImpressionUpdateTask(AsyncTask):
         # Get messages from the last 10 minutes
         current_time = int(time.time())
-        start_time = current_time - 600  # 10 minutes ago
+        start_time = current_time - 6000  # 10 minutes ago
 
         logger.debug(f"获取时间范围: {start_time} -> {current_time}")
 
         # Get all messages


@@ -320,19 +320,19 @@ class RelationshipManager:
         messages_before = get_raw_msg_by_timestamp_with_chat(
             chat_id=chat_id,
-            timestamp_start=timestamp - 600,  # 10 minutes before
+            timestamp_start=timestamp - 6000,  # 10 minutes before
             timestamp_end=timestamp,
             # person_ids=[user_id],
-            limit=200,
+            limit=100,
             limit_mode="latest"
         )
 
         messages_after = get_raw_msg_by_timestamp_with_chat(
             chat_id=chat_id,
             timestamp_start=timestamp,
-            timestamp_end=timestamp + 600,  # 10 minutes after
+            timestamp_end=timestamp + 6000,  # 10 minutes after
             # person_ids=[user_id],
-            limit=200,
+            limit=100,
             limit_mode="earliest"
         )