fix: 增大了默认的 max_tokens 防止溢出；message_cq 改用异步 get_image 防止阻塞
This commit is contained in:
@@ -37,7 +37,7 @@ class EmojiManager:
         self._scan_task = None
         self.vlm = LLM_request(model=global_config.vlm, temperature=0.3, max_tokens=1000)
         self.llm_emotion_judge = LLM_request(
-            model=global_config.llm_emotion_judge, max_tokens=60, temperature=0.8
+            model=global_config.llm_emotion_judge, max_tokens=600, temperature=0.8
         )  # 更高的温度,更少的token(后续可以根据情绪来调整温度)

     def _ensure_emoji_dir(self):
||||
@@ -275,9 +275,6 @@ class EmojiManager:
                     continue
                 logger.info(f"check通过 {check}")

-                if description is not None:
-                    embedding = await get_embedding(description)
-
                 if description is not None:
                     embedding = await get_embedding(description)

Reference in New Issue
Block a user