From a445c222505cd7ff724ffdea93a0ba2ac6eb2d5b Mon Sep 17 00:00:00 2001
From: KawaiiYusora
Date: Thu, 6 Mar 2025 01:08:26 +0800
Subject: [PATCH] Why does my LLM only have half a TAG
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/plugins/models/utils_model.py | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/src/plugins/models/utils_model.py b/src/plugins/models/utils_model.py
index 3ba873d74..c7dbc6ffd 100644
--- a/src/plugins/models/utils_model.py
+++ b/src/plugins/models/utils_model.py
@@ -36,6 +36,7 @@ class LLM_request:
         data = {
             "model": self.model_name,
             "messages": [{"role": "user", "content": prompt}],
+            "max_tokens": 8000,
             **self.params
         }
 
@@ -65,10 +66,10 @@ class LLM_request:
                 think_match = None
                 reasoning_content = message.get("reasoning_content", "")
                 if not reasoning_content:
-                    think_match = re.search(r'<think>(.*?)</think>', content, re.DOTALL)
+                    think_match = re.search(r'(?:<think>)?(.*?)</think>', content, re.DOTALL)
                 if think_match:
                     reasoning_content = think_match.group(1).strip()
-                content = re.sub(r'<think>.*?</think>', '', content, flags=re.DOTALL).strip()
+                content = re.sub(r'(?:<think>)?.*?</think>', '', content, flags=re.DOTALL, count=1).strip()
                 return content, reasoning_content
             return "没有返回结果", ""
 
@@ -112,9 +113,10 @@ class LLM_request:
                     ]
                 }
             ],
+            "max_tokens": 8000,
             **self.params
         }
-        
+
         # 发送请求到完整的chat/completions端点
         api_url = f"{self.base_url.rstrip('/')}/chat/completions"
 
@@ -122,9 +124,9 @@
 
         max_retries = 3
         base_wait_time = 15
-        
+
         current_image_base64 = image_base64
-        
+
         for retry in range(max_retries):
             try:
@@ -141,7 +143,7 @@
                     logger.warning("图片太大(413),尝试压缩...")
                     current_image_base64 = compress_base64_image_by_scale(current_image_base64)
                     continue
-                
+
                 response.raise_for_status()  # 检查其他响应状态
                 result = await response.json()
 
@@ -151,10 +153,10 @@
                 think_match = None
                 reasoning_content = message.get("reasoning_content", "")
                 if not reasoning_content:
-                    think_match = re.search(r'<think>(.*?)</think>', content, re.DOTALL)
+                    think_match = re.search(r'(?:<think>)?(.*?)</think>', content, re.DOTALL)
                 if think_match:
                     reasoning_content = think_match.group(1).strip()
-                content = re.sub(r'<think>.*?</think>', '', content, flags=re.DOTALL).strip()
+                content = re.sub(r'(?:<think>)?.*?</think>', '', content, flags=re.DOTALL, count=1).strip()
                 return content, reasoning_content
             return "没有返回结果", ""
 
@@ -197,6 +199,7 @@
                     ]
                 }
             ],
+            "max_tokens": 8000,
             **self.params
         }
 
@@ -226,10 +229,10 @@
                 think_match = None
                 reasoning_content = message.get("reasoning_content", "")
                 if not reasoning_content:
-                    think_match = re.search(r'<think>(.*?)</think>', content, re.DOTALL)
+                    think_match = re.search(r'(?:<think>)?(.*?)</think>', content, re.DOTALL)
                 if think_match:
                     reasoning_content = think_match.group(1).strip()
-                content = re.sub(r'<think>.*?</think>', '', content, flags=re.DOTALL).strip()
+                content = re.sub(r'(?:<think>)?.*?</think>', '', content, flags=re.DOTALL, count=1).strip()
                 return content, reasoning_content
             return "没有返回结果", ""
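
Reviewer note (not part of the patch to apply): the sketch below is a minimal, standalone illustration of what the regex change does, assuming the failure mode the commit title describes, i.e. the model sometimes emits only the closing </think> tag without the opening one. The helper name split_think and the sample reply string are made up for the demo; the two regexes are copied from the patched lines.

import re

def split_think(content: str) -> tuple[str, str]:
    # Mirrors the patched extraction: the opening <think> tag is optional,
    # and only the first think block is stripped from the visible reply.
    reasoning = ""
    match = re.search(r'(?:<think>)?(.*?)</think>', content, re.DOTALL)
    if match:
        reasoning = match.group(1).strip()
    content = re.sub(r'(?:<think>)?.*?</think>', '', content, flags=re.DOTALL, count=1).strip()
    return content, reasoning

# A reply where the model dropped the opening tag ("half a TAG"):
reply = "The user said hi, so I should greet them back.</think>Hello!"

# Old pattern: requires both tags, so the reasoning is never captured and
# the stray text plus </think> leak into the final answer.
print(re.search(r'<think>(.*?)</think>', reply, re.DOTALL))  # None

# New pattern: still splits correctly with only the closing tag present.
print(split_think(reply))
# ('Hello!', 'The user said hi, so I should greet them back.')

Replies that start with a well-formed <think>...</think> block, or that contain no think block at all, behave the same as before (count=1 only limits the strip to the first block). On the other half of the patch: because "max_tokens": 8000 is placed before **self.params in each request payload, a max_tokens value already present in self.params still overrides the new default.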