fix: update model names, remove the chat_mind processor

SengokuCola
2025-06-03 19:43:54 +08:00
parent 51f5e610d7
commit f94f14cce2
9 changed files with 25 additions and 48 deletions
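
The change is the same in each processor shown below: the dedicated focus_chat_mind and focus_self_recognize model entries (together with their per-model "temp" settings) are dropped, and the processors reuse the existing planner and relation model configs instead (focus_chat_mind -> planner for MindProcessor and WorkingMemoryProcessor, focus_self_recognize -> relation for RelationshipProcessor and SelfProcessor). Below is a minimal sketch of the resulting constructor pattern; LLMRequest, global_config, and BaseProcessor are the project's own names taken from the diff, while the __init__ signature and the rest of the class body are assumptions made purely for illustration.

# Minimal sketch only. LLMRequest, global_config, and BaseProcessor come from the
# project's own modules (not shown in this excerpt); the __init__ signature is an
# assumption made for illustration.
class MindProcessor(BaseProcessor):
    def __init__(self, subheartflow_id: str):
        super().__init__()
        self.subheartflow_id = subheartflow_id
        # Before this commit: model=global_config.model.focus_chat_mind, with an
        # optional temperature taken from focus_chat_mind["temp"]. After: the
        # shared planner model config is reused.
        self.llm_model = LLMRequest(
            model=global_config.model.planner,
            max_tokens=800,
            request_type="focus.processor.chat_mind",
        )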

@@ -68,8 +68,7 @@ class MindProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id
         self.llm_model = LLMRequest(
-            model=global_config.model.focus_chat_mind,
-            # temperature=global_config.model.focus_chat_mind["temp"],
+            model=global_config.model.planner,
             max_tokens=800,
             request_type="focus.processor.chat_mind",
         )

@@ -49,8 +49,7 @@ class RelationshipProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id
         self.llm_model = LLMRequest(
-            model=global_config.model.focus_self_recognize,
-            temperature=global_config.model.focus_self_recognize["temp"],
+            model=global_config.model.relation,
             max_tokens=800,
             request_type="focus.processor.self_identify",
         )

@@ -51,8 +51,7 @@ class SelfProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id
         self.llm_model = LLMRequest(
-            model=global_config.model.focus_self_recognize,
-            temperature=global_config.model.focus_self_recognize["temp"],
+            model=global_config.model.relation,
             max_tokens=800,
             request_type="focus.processor.self_identify",
         )

@@ -60,8 +60,7 @@ class WorkingMemoryProcessor(BaseProcessor):
         self.subheartflow_id = subheartflow_id
         self.llm_model = LLMRequest(
-            model=global_config.model.focus_chat_mind,
-            temperature=global_config.model.focus_chat_mind["temp"],
+            model=global_config.model.planner,
             max_tokens=800,
             request_type="focus.processor.working_memory",
         )