解决遗留冲突,复活心流关系

This commit is contained in:
meng_xi_pan
2025-04-02 04:35:09 +08:00
parent 59043abefc
commit 52e363700a
2 changed files with 7 additions and 10 deletions

View File

@@ -276,11 +276,11 @@ class ThinkFlowChat:
 timer2 = time.time()
 timing_results["更新心流"] = timer2 - timer1
-# # 更新关系
-# timer1 = time.time()
-# await self._update_relationship(message, response_set)
-# timer2 = time.time()
-# timing_results["更新关系"] = timer2 - timer1
+# 更新关系
+timer1 = time.time()
+await self._update_relationship(message, response_set)
+timer2 = time.time()
+timing_results["更新关系"] = timer2 - timer1
 # 输出性能计时结果
 if do_reply:

View File

@@ -42,7 +42,6 @@ class ResponseGenerator:
 current_model = self.model_normal
 model_response = await self._generate_response_with_model(message, current_model)
-undivided_response = model_response
 # print(f"raw_content: {model_response}")
@@ -50,10 +49,10 @@ class ResponseGenerator:
 logger.info(f"{global_config.BOT_NICKNAME}的回复是:{model_response}")
 model_response = await self._process_response(model_response)
-return model_response, undivided_response
+return model_response
 else:
 logger.info(f"{self.current_model_type}思考,失败")
-return None, None
+return None
 async def _generate_response_with_model(self, message: MessageThinking, model: LLM_request):
 sender_name = ""
@@ -147,8 +146,6 @@ class ResponseGenerator:
 - 严格基于文字直接表达的对立关系判断
 """
-logger.info(prompt)
 # 调用模型生成结果
 result, _, _ = await self.model_sum.generate_response(prompt)
 result = result.strip()