fix: remove token

This commit is contained in:
SengokuCola
2025-06-09 00:33:28 +08:00
parent bd7c25b26a
commit 16a704e01f
4 changed files with 0 additions and 4 deletions

View File

@@ -94,7 +94,6 @@ class RelationshipProcessor(BaseProcessor):
         self.llm_model = LLMRequest(
             model=global_config.model.relation,
-            max_tokens=800,
             request_type="focus.relationship",
         )

View File

@@ -56,7 +56,6 @@ class SelfProcessor(BaseProcessor):
         self.llm_model = LLMRequest(
             model=global_config.model.relation,
-            max_tokens=800,
             request_type="focus.processor.self_identify",
         )

View File

@@ -43,7 +43,6 @@ class ToolProcessor(BaseProcessor):
         self.log_prefix = f"[{subheartflow_id}:ToolExecutor] "
         self.llm_model = LLMRequest(
             model=global_config.model.focus_tool_use,
-            max_tokens=500,
             request_type="focus.processor.tool",
         )
         self.structured_info = []

View File

@@ -61,7 +61,6 @@ class WorkingMemoryProcessor(BaseProcessor):
         self.llm_model = LLMRequest(
             model=global_config.model.planner,
-            max_tokens=800,
             request_type="focus.processor.working_memory",
         )