fix: removed some token limits

SengokuCola committed 2025-06-09 00:32:30 +08:00
parent 79405d1871
commit 1e51717796
14 changed files with 7 additions and 24 deletions


@@ -110,7 +110,6 @@ class ActionPlanner:
         self.llm = LLMRequest(
             model=global_config.llm_PFC_action_planner,
             temperature=global_config.llm_PFC_action_planner["temp"],
-            max_tokens=1500,
             request_type="action_planning",
         )
         self.personality_info = individuality.get_prompt(x_person=2, level=3)


@@ -89,7 +89,6 @@ class ReplyGenerator:
         self.llm = LLMRequest(
             model=global_config.llm_PFC_chat,
             temperature=global_config.llm_PFC_chat["temp"],
-            max_tokens=300,
             request_type="reply_generation",
         )
         self.personality_info = individuality.get_prompt(x_person=2, level=3)
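
The change is uniform across the touched files: the hard-coded max_tokens argument is dropped from each LLMRequest constructor call, so output length falls back to whatever the model or provider allows. Below is a minimal sketch of that effect, using a stand-in LLMRequest class with assumed defaults; the real class lives in the project's model layer and may differ.

# Sketch only: assumes LLMRequest treats a missing max_tokens as
# "no explicit cap" (left to the provider default). This class is a
# stand-in for illustration, not the project's actual implementation.
from typing import Optional

class LLMRequest:
    def __init__(
        self,
        model: dict,
        temperature: float,
        max_tokens: Optional[int] = None,  # None = no hard output cap
        request_type: str = "",
    ):
        self.model = model
        self.temperature = temperature
        self.max_tokens = max_tokens
        self.request_type = request_type

# Before this commit: replies were truncated at a fixed budget
# (1500 tokens for action planning, 300 for reply generation).
capped = LLMRequest(model={"temp": 0.7}, temperature=0.7,
                    max_tokens=300, request_type="reply_generation")

# After this commit: the argument is simply omitted, so no cap is set.
uncapped = LLMRequest(model={"temp": 0.7}, temperature=0.7,
                      request_type="reply_generation")
assert uncapped.max_tokens is None

The practical consequence is that reply length is now bounded only by the model's own limits rather than by a per-call ceiling, which avoids mid-sentence truncation at the cost of potentially longer (and costlier) completions.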