Allow the generator to customize whether to split sentences or generate typos

Parameter passing that spans three files ((
Author: A0000Xz
Date: 2025-06-26 23:30:31 +08:00
Parent: 63eb678433
Commit: 514ccd6be3
3 changed files with 21 additions and 5 deletions


@@ -162,6 +162,8 @@ class DefaultReplyer:
     async def generate_reply_with_context(
         self,
         reply_data: Dict[str, Any],
+        enable_splitter: bool=True,
+        enable_chinese_typo: bool=True
     ) -> Tuple[bool, Optional[List[str]]]:
         """
         回复器 (Replier): 核心逻辑,负责生成回复文本。
@@ -191,7 +193,7 @@ class DefaultReplyer:
             logger.error(f"{self.log_prefix}LLM 生成失败: {llm_e}")
             return False, None  # LLM 调用失败则无法生成回复
 
-        processed_response = process_llm_response(content)
+        processed_response = process_llm_response(content,enable_splitter,enable_chinese_typo)
 
         # 5. 处理 LLM 响应
         if not content:
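
The other end of this call is not visible in the diff: process_llm_response is defined in another of the three changed files. Below is a minimal sketch of what a helper with these two flags could look like; the stand-in functions _split_sentences and _add_typos and their behavior are illustrative assumptions, and only the signature shape (content plus the two boolean flags) comes from the call sites above.

from typing import List
import random

def _split_sentences(text: str) -> List[str]:
    # Naive stand-in splitter: cut after common Chinese/Western sentence enders.
    parts, buf = [], ""
    for ch in text:
        buf += ch
        if ch in "。!?.!?":
            parts.append(buf)
            buf = ""
    if buf:
        parts.append(buf)
    return parts

def _add_typos(text: str) -> str:
    # Stand-in "typo" pass: occasionally duplicates a character.
    out = []
    for ch in text:
        out.append(ch)
        if random.random() < 0.05:
            out.append(ch)
    return "".join(out)

def process_llm_response(
    content: str,
    enable_splitter: bool = True,
    enable_chinese_typo: bool = True,
) -> List[str]:
    # The two new flags gate the optional post-processing steps.
    segments = _split_sentences(content) if enable_splitter else [content]
    if enable_chinese_typo:
        segments = [_add_typos(s) for s in segments]
    return segments

Defaulting both flags to True keeps the existing behavior for any caller that never passes them.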
@@ -216,6 +218,8 @@ class DefaultReplyer:
     async def rewrite_reply_with_context(
         self,
         reply_data: Dict[str, Any],
+        enable_splitter: bool=True,
+        enable_chinese_typo: bool=True
     ) -> Tuple[bool, Optional[List[str]]]:
         """
         表达器 (Expressor): 核心逻辑,负责生成回复文本。
@@ -252,7 +256,7 @@ class DefaultReplyer:
             logger.error(f"{self.log_prefix}LLM 生成失败: {llm_e}")
             return False, None  # LLM 调用失败则无法生成回复
 
-        processed_response = process_llm_response(content)
+        processed_response = process_llm_response(content,enable_splitter,enable_chinese_typo)
 
         # 5. 处理 LLM 响应
         if not content:
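
On the caller's side (the remaining file in the three-file parameter chain, not shown here) the flags are simply forwarded. A hypothetical usage sketch follows: the function name send_plain_reply, the replyer instance, and the print stand-in are assumptions, and only the keyword arguments come from the signatures changed above.

from typing import Any, Dict

async def send_plain_reply(replyer, reply_data: Dict[str, Any]) -> None:
    # Disable both post-processing steps for this reply.
    success, segments = await replyer.generate_reply_with_context(
        reply_data,
        enable_splitter=False,       # keep the reply as a single message
        enable_chinese_typo=False,   # do not inject typos
    )
    if success and segments:
        for segment in segments:
            print(segment)  # stand-in for the real send logic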