fix: increase the default max_tokens to prevent overflow; make messagecq use an async get_image to avoid blocking

Author: tcmofashi
Date: 2025-03-14 15:38:33 +08:00
parent e2c5d42634
commit d3fe02e467
7 changed files with 207 additions and 286 deletions


@@ -37,7 +37,7 @@ class EmojiManager:
         self._scan_task = None
         self.vlm = LLM_request(model=global_config.vlm, temperature=0.3, max_tokens=1000)
         self.llm_emotion_judge = LLM_request(
-            model=global_config.llm_emotion_judge, max_tokens=60, temperature=0.8
+            model=global_config.llm_emotion_judge, max_tokens=600, temperature=0.8
         )  # higher temperature, fewer tokens; the temperature could later be adjusted based on emotion

     def _ensure_emoji_dir(self):
@@ -275,9 +275,6 @@ class EmojiManager:
                     continue
                 logger.info(f"check通过 {check}")
                 if description is not None:
                     embedding = await get_embedding(description)
-                if description is not None:
-                    embedding = await get_embedding(description)
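
The commit message also covers switching messagecq to an async get_image; that change sits in the other changed files not shown above. A minimal sketch of the general pattern, assuming the original code downloaded the image with a blocking HTTP call; get_image_blocking, the requests-based fetch, and the url parameter are hypothetical names, not the project's actual API:

import asyncio
import requests  # assumption: the original sync path used a blocking HTTP client like this

def get_image_blocking(url: str) -> bytes:
    # Hypothetical stand-in for the original synchronous download.
    resp = requests.get(url, timeout=10)
    resp.raise_for_status()
    return resp.content

async def get_image(url: str) -> bytes:
    # Run the blocking download in a worker thread so the event loop can keep
    # processing other messages while the image is in flight.
    return await asyncio.to_thread(get_image_blocking, url)

# usage inside an async message handler:
#     data = await get_image(image_url)

asyncio.to_thread offloads the blocking call without rewriting the HTTP layer; a fuller rewrite could use an async client such as aiohttp instead.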