fix: removed some token limits

SengokuCola
2025-06-09 00:32:30 +08:00
parent 79405d1871
commit 1e51717796
14 changed files with 7 additions and 24 deletions


@@ -19,19 +19,15 @@ class NormalChatGenerator:
         # TODO: API-Adapter修改标记
         self.model_reasoning = LLMRequest(
             model=global_config.model.replyer_1,
-            # temperature=0.7,
-            max_tokens=3000,
             request_type="normal.chat_1",
         )
         self.model_normal = LLMRequest(
             model=global_config.model.replyer_2,
-            # temperature=global_config.model.replyer_2["temp"],
-            max_tokens=256,
             request_type="normal.chat_2",
         )
         self.model_sum = LLMRequest(
-            model=global_config.model.memory_summary, temperature=0.7, max_tokens=3000, request_type="relation"
+            model=global_config.model.memory_summary, temperature=0.7, request_type="relation"
         )
         self.current_model_type = "r1"  # 默认使用 R1
         self.current_model_name = "unknown model"
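
Effect of the change, shown as a minimal, hypothetical sketch (LLMRequestSketch and to_payload are illustrative names only, not the project's real LLMRequest API): when max_tokens is left unset, the outgoing request simply omits the cap, so reply length falls back to whatever default the model or provider applies instead of being truncated at 256 or 3000 tokens.

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class LLMRequestSketch:
        # Hypothetical stand-in for the project's LLMRequest, used only to
        # show how an unset max_tokens drops out of the request payload.
        model: str
        request_type: str
        temperature: Optional[float] = None
        max_tokens: Optional[int] = None  # None -> no explicit cap

        def to_payload(self) -> dict:
            # Include only parameters that were explicitly set.
            payload = {"model": self.model}
            if self.temperature is not None:
                payload["temperature"] = self.temperature
            if self.max_tokens is not None:
                payload["max_tokens"] = self.max_tokens
            return payload

    # After this commit, a request like normal.chat_2 no longer carries max_tokens=256:
    print(LLMRequestSketch(model="replyer_2", request_type="normal.chat_2").to_payload())
    # -> {'model': 'replyer_2'}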