fix: removed some token limits
@@ -89,7 +89,6 @@ class ReplyGenerator:
         self.llm = LLMRequest(
             model=global_config.llm_PFC_chat,
             temperature=global_config.llm_PFC_chat["temp"],
-            max_tokens=300,
             request_type="reply_generation",
         )
         self.personality_info = individuality.get_prompt(x_person=2, level=3)
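In effect, dropping max_tokens=300 means the reply-generation request no longer caps completions at 300 tokens; the length limit falls back to whatever default LLMRequest or the underlying model API applies. A minimal sketch of that pattern, assuming a hypothetical wrapper in which max_tokens=None means "defer to the provider default" (the real LLMRequest signature is not shown in this commit):

from typing import Optional

class LLMRequest:
    """Hypothetical sketch: max_tokens=None defers to the provider's default."""

    def __init__(self, model: dict, temperature: float,
                 max_tokens: Optional[int] = None,
                 request_type: str = "default"):
        self.model = model
        self.temperature = temperature
        self.max_tokens = max_tokens  # None -> no explicit cap sent to the API
        self.request_type = request_type

    def build_payload(self, prompt: str) -> dict:
        payload = {
            "model": self.model.get("name"),
            "temperature": self.temperature,
            "messages": [{"role": "user", "content": prompt}],
        }
        # Only include max_tokens when the caller set a cap; omitting the key
        # lets the provider apply its own default, which is what this commit
        # achieves by deleting the max_tokens=300 argument.
        if self.max_tokens is not None:
            payload["max_tokens"] = self.max_tokens
        return payload

Omitting the key entirely, rather than sending max_tokens=None, matters for providers that reject null values in the request body.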