From ab259c20aa6a9e190078c0f7de85b0c3485ee050 Mon Sep 17 00:00:00 2001
From: tcmofashi
Date: Wed, 5 Mar 2025 22:04:36 +0800
Subject: [PATCH] fix: enhance robustness of emotion tag generation
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/plugins/chat/llm_generator.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/src/plugins/chat/llm_generator.py b/src/plugins/chat/llm_generator.py
index 034ff7348..f380d62f4 100644
--- a/src/plugins/chat/llm_generator.py
+++ b/src/plugins/chat/llm_generator.py
@@ -138,9 +138,12 @@ class ResponseGenerator:
 内容:{content}
 输出:
 '''
-            content, _ = await self.model_v3.generate_response(prompt)
-            return [content.strip()] if content else ["neutral"]
+            content=content.strip()
+            if content in ['happy','angry','sad','surprised','disgusted','fearful','neutral']:
+                return [content]
+            else:
+                return ["neutral"]
         except Exception as e:
             print(f"获取情感标签时出错: {e}")
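
Note on the pattern: the added lines apply a whitelist-and-fallback guard, where the stripped reply is accepted only if it matches one of the seven known emotion labels and anything else degrades to "neutral" instead of propagating arbitrary model output. Below is a minimal standalone sketch of that validation step; the names VALID_EMOTIONS and normalize_emotion_tag are illustrative only and do not exist in src/plugins/chat/llm_generator.py.

# Standalone sketch of the whitelist-and-fallback check introduced by this patch.
# VALID_EMOTIONS and normalize_emotion_tag are hypothetical names used for
# illustration; they are not defined in the repository.
from typing import List, Optional

VALID_EMOTIONS = {
    "happy", "angry", "sad", "surprised", "disgusted", "fearful", "neutral",
}


def normalize_emotion_tag(raw: Optional[str]) -> List[str]:
    """Clamp free-form model output to a known emotion label.

    Empty or unexpected output falls back to ["neutral"], so callers never
    receive an unrecognized tag.
    """
    if not raw:
        return ["neutral"]
    tag = raw.strip()
    return [tag] if tag in VALID_EMOTIONS else ["neutral"]


if __name__ == "__main__":
    print(normalize_emotion_tag("happy"))       # ['happy']
    print(normalize_emotion_tag("  sad\n"))     # ['sad']
    print(normalize_emotion_tag("joyful!!!"))   # ['neutral'] (not whitelisted)
    print(normalize_emotion_tag(None))          # ['neutral']

Keeping the fallback at this validation boundary means downstream consumers of the emotion tag never need their own defensive checks.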