fix: removed some token limits
@@ -19,19 +19,15 @@ class NormalChatGenerator:
         # TODO: API-Adapter修改标记
         self.model_reasoning = LLMRequest(
             model=global_config.model.replyer_1,
             # temperature=0.7,
-            max_tokens=3000,
             request_type="normal.chat_1",
         )
         self.model_normal = LLMRequest(
             model=global_config.model.replyer_2,
             # temperature=global_config.model.replyer_2["temp"],
-            max_tokens=256,
             request_type="normal.chat_2",
         )
 
         self.model_sum = LLMRequest(
-            model=global_config.model.memory_summary, temperature=0.7, max_tokens=3000, request_type="relation"
+            model=global_config.model.memory_summary, temperature=0.7, request_type="relation"
         )
         self.current_model_type = "r1"  # 默认使用 R1
         self.current_model_name = "unknown model"
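For context, the sketch below illustrates what dropping max_tokens changes, assuming the request wrapper only forwards parameters that were explicitly set. The LLMRequest class here is a hypothetical stand-in (the repository's real class is not reproduced), and the to_payload method and the string model names exist only for this illustration.

from dataclasses import dataclass
from typing import Optional


@dataclass
class LLMRequest:
    """Hypothetical stand-in for the repository's LLMRequest; illustration only."""
    model: str
    request_type: str
    temperature: Optional[float] = None
    max_tokens: Optional[int] = None  # None -> no explicit cap; backend default applies

    def to_payload(self) -> dict:
        # Include only parameters that were explicitly set, so omitting
        # max_tokens (as this commit does) drops the hard length cap
        # from the outgoing request.
        payload = {"model": self.model, "request_type": self.request_type}
        if self.temperature is not None:
            payload["temperature"] = self.temperature
        if self.max_tokens is not None:
            payload["max_tokens"] = self.max_tokens
        return payload


# Before this commit: replies from model_normal were capped at 256 tokens.
capped = LLMRequest(model="replyer_2", request_type="normal.chat_2", max_tokens=256)

# After this commit: no explicit cap is sent; the provider's default applies.
uncapped = LLMRequest(model="replyer_2", request_type="normal.chat_2")

print(capped.to_payload())    # includes "max_tokens": 256
print(uncapped.to_payload())  # no max_tokens key at all

With the caps removed, reply length is governed by the provider/model defaults rather than the hard 3000- and 256-token limits shown in the hunk above.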