fix: Correct the chain-of-thought extraction logic in the LLMRequest class so the reasoning content is retrieved properly
@@ -311,7 +311,7 @@ class LLMRequest:
         """Extract CoT (chain-of-thought) reasoning"""
         match = re.search(r"(?:<think>)?(.*?)</think>", content, re.DOTALL)
         content = re.sub(r"(?:<think>)?.*?</think>", "", content, flags=re.DOTALL, count=1).strip()
-        reasoning = match.group(1).strip() if match else ""
+        reasoning = match[1].strip() if match else ""
         return content, reasoning
 
     # === Main API methods ===
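For readers skimming the hunk, the sketch below reconstructs the patched extraction as a standalone function so it can be run in isolation. The function name `extract_cot`, the type hints, and the sample reply are assumptions for illustration and are not taken from the repository; the body mirrors only the lines visible in this diff. Note that `match[1]` is shorthand for `match.group(1)` on Python 3.6+, so both spellings return the first capture group.

```python
import re
from typing import Tuple


def extract_cot(content: str) -> Tuple[str, str]:
    """Minimal sketch of the extraction shown in the diff (name is hypothetical).

    The opening <think> tag is optional because some providers emit only the
    closing tag; the lazy capture group holds the reasoning text.
    """
    match = re.search(r"(?:<think>)?(.*?)</think>", content, re.DOTALL)
    # Remove the first reasoning block from the visible reply.
    content = re.sub(r"(?:<think>)?.*?</think>", "", content, flags=re.DOTALL, count=1).strip()
    # match[1] is equivalent to match.group(1) on Python 3.6+.
    reasoning = match[1].strip() if match else ""
    return content, reasoning


if __name__ == "__main__":
    reply = "<think>The user asked for 2+2, which is 4.</think>The answer is 4."
    answer, reasoning = extract_cot(reply)
    print(answer)     # -> The answer is 4.
    print(reasoning)  # -> The user asked for 2+2, which is 4.
```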