Fix assorted small issues

Commit: dc2cf843e5
Parent: a0b1b1f8d8
Author: 春河晴
Date: 2025-04-16 17:37:28 +09:00
36 changed files with 114 additions and 107 deletions

View File

@@ -1,7 +1,7 @@
from .emoji_manager import emoji_manager
from ..person_info.relationship_manager import relationship_manager
from .chat_stream import chat_manager
-from .message_sender import message_manager
+from .messagesender import message_manager
from ..storage.storage import MessageStorage

View File

@@ -42,7 +42,7 @@ class ChatBot:
self._started = True
-async def _create_PFC_chat(self, message: MessageRecv):
+async def _create_pfc_chat(self, message: MessageRecv):
try:
chat_id = str(message.chat_stream.stream_id)
@@ -112,7 +112,7 @@ class ChatBot:
)
message.update_chat_stream(chat)
await self.only_process_chat.process_message(message)
-await self._create_PFC_chat(message)
+await self._create_pfc_chat(message)
else:
if groupinfo.group_id in global_config.talk_allowed_groups:
# logger.debug(f"开始群聊模式{str(message_data)[:50]}...")

View File

@@ -13,7 +13,7 @@ from ...common.database import db
from ..config.config import global_config
from ..chat.utils import get_embedding
from ..chat.utils_image import ImageManager, image_path_to_base64
-from ..models.utils_model import LLM_request
+from ..models.utils_model import LLMRequest
from src.common.logger import get_module_logger
logger = get_module_logger("emoji")
@@ -34,8 +34,8 @@ class EmojiManager:
def __init__(self):
self._scan_task = None
-self.vlm = LLM_request(model=global_config.vlm, temperature=0.3, max_tokens=1000, request_type="emoji")
-self.llm_emotion_judge = LLM_request(
+self.vlm = LLMRequest(model=global_config.vlm, temperature=0.3, max_tokens=1000, request_type="emoji")
+self.llm_emotion_judge = LLMRequest(
model=global_config.llm_emotion_judge, max_tokens=600, temperature=0.8, request_type="emoji"
) # Higher temperature and fewer tokens; the temperature could later be adjusted based on emotion
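Several other files touched by this commit repeat the same LLM_request to LLMRequest rename. Purely as an illustration (not part of the commit), a staged rename can keep old call sites working through a deprecated alias; the class body and module layout below are simplified assumptions, not the project's real utils_model.

# Illustrative only: staging a class rename with a deprecated alias.
# "LLMRequest" here is a stand-in; the real class lives in the project's utils_model module.
import warnings


class LLMRequest:
    """New, PEP 8-style class name."""

    def __init__(self, model=None, **kwargs):
        self.model = model
        self.kwargs = kwargs


def _deprecated_alias(new_cls, old_name):
    """Return a subclass that warns when the old name is instantiated."""

    class _Alias(new_cls):
        def __init__(self, *args, **kwargs):
            warnings.warn(
                f"{old_name} is deprecated, use {new_cls.__name__} instead",
                DeprecationWarning,
                stacklevel=2,
            )
            super().__init__(*args, **kwargs)

    _Alias.__name__ = old_name
    return _Alias


# Old call sites keep working while the rename is rolled out file by file.
LLM_request = _deprecated_alias(LLMRequest, "LLM_request")
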

View File

@@ -59,20 +59,20 @@ class MessageBuffer:
logger.debug(f"被新消息覆盖信息id: {cache_msg.message.message_info.message_id}")
# Find the most recent successfully processed message (T)
-recent_F_count = 0
+recent_f_count = 0
for msg_id in reversed(self.buffer_pool[person_id_]):
msg = self.buffer_pool[person_id_][msg_id]
if msg.result == "T":
break
elif msg.result == "F":
-recent_F_count += 1
+recent_f_count += 1
# Trigger condition: 3-5 or more "F" messages have accumulated since the last "T"
-if recent_F_count >= random.randint(3, 5):
+if recent_f_count >= random.randint(3, 5):
new_msg = CacheMessages(message=message, result="T")
new_msg.cache_determination.set()
self.buffer_pool[person_id_][message.message_info.message_id] = new_msg
-logger.debug(f"快速处理消息(已堆积{recent_F_count}条F): {message.message_info.message_id}")
+logger.debug(f"快速处理消息(已堆积{recent_f_count}条F): {message.message_info.message_id}")
return
# Add the new message
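The hunk above only lowercases recent_F_count, but the logic it touches is easy to miss in diff form: count the "F" (unprocessed) entries sitting after the most recent "T" (processed) entry, and force-accept the new message once 3 to 5 of them have piled up. A minimal, self-contained sketch of that rule, with the buffer and message classes simplified to stand-ins rather than the project's actual MessageBuffer and CacheMessages:

# Simplified illustration of the buffer fast-path; not the project's real classes.
import random
from dataclasses import dataclass


@dataclass
class CachedMessage:
    message_id: str
    result: str  # "T" = processed, "F" = not yet processed


def should_force_accept(buffer: list[CachedMessage]) -> bool:
    """Count "F" entries after the most recent "T"; force-accept once 3-5 pile up."""
    recent_f_count = 0
    for msg in reversed(buffer):
        if msg.result == "T":
            break
        if msg.result == "F":
            recent_f_count += 1
    return recent_f_count >= random.randint(3, 5)


buffer = [
    CachedMessage("m1", "T"),
    CachedMessage("m2", "F"),
    CachedMessage("m3", "F"),
    CachedMessage("m4", "F"),
    CachedMessage("m5", "F"),
    CachedMessage("m6", "F"),
]
print(should_force_accept(buffer))  # True: five "F" entries since the last "T"
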

View File

@@ -23,7 +23,7 @@ sender_config = LogConfig(
logger = get_module_logger("msg_sender", config=sender_config)
-class Message_Sender:
+class MessageSender:
"""Sender"""
def __init__(self):
@@ -83,7 +83,7 @@ class Message_Sender:
# logger.info(f"发送消息到{end_point}")
# logger.info(message_json)
try:
-await global_api.send_message_REST(end_point, message_json)
+await global_api.send_message_rest(end_point, message_json)
except Exception as e:
logger.error(f"REST方式发送失败出现错误: {str(e)}")
logger.info("尝试使用ws发送")
@@ -286,4 +286,4 @@ class MessageManager:
# Create the global message manager instance
message_manager = MessageManager()
# Create the global sender instance
-message_sender = Message_Sender()
+message_sender = MessageSender()

View File

@@ -8,7 +8,7 @@ import jieba
import numpy as np
from src.common.logger import get_module_logger
-from ..models.utils_model import LLM_request
+from ..models.utils_model import LLMRequest
from ..utils.typo_generator import ChineseTypoGenerator
from ..config.config import global_config
from .message import MessageRecv, Message
@@ -91,7 +91,7 @@ def is_mentioned_bot_in_message(message: MessageRecv) -> tuple[bool, float]:
async def get_embedding(text, request_type="embedding"):
"""获取文本的embedding向量"""
llm = LLM_request(model=global_config.embedding, request_type=request_type)
llm = LLMRequest(model=global_config.embedding, request_type=request_type)
# return llm.get_embedding_sync(text)
try:
embedding = await llm.get_embedding(text)
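get_embedding returns a plain vector, and a typical downstream use is a cosine-similarity comparison. A small sketch with numpy (already imported in this module) and dummy vectors rather than real model output:

# Cosine similarity between two embedding vectors; dummy data, not model output.
import numpy as np


def cosine_similarity(a: np.ndarray, b: np.ndarray) -> float:
    denom = np.linalg.norm(a) * np.linalg.norm(b)
    return float(np.dot(a, b) / denom) if denom else 0.0


query_vec = np.array([0.1, 0.7, 0.2])
memory_vec = np.array([0.09, 0.65, 0.31])
print(f"similarity: {cosine_similarity(query_vec, memory_vec):.3f}")
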
@@ -105,7 +105,7 @@ async def get_recent_group_messages(chat_id: str, limit: int = 12) -> list:
"""从数据库获取群组最近的消息记录
Args:
group_id: 群组ID
chat_id: 群组ID
limit: 获取消息数量默认12条
Returns:

View File

@@ -9,7 +9,7 @@ import io
from ...common.database import db
from ..config.config import global_config
-from ..models.utils_model import LLM_request
+from ..models.utils_model import LLMRequest
from src.common.logger import get_module_logger
@@ -32,7 +32,7 @@ class ImageManager:
self._ensure_description_collection()
self._ensure_image_dir()
self._initialized = True
-self._llm = LLM_request(model=global_config.vlm, temperature=0.4, max_tokens=300, request_type="image")
+self._llm = LLMRequest(model=global_config.vlm, temperature=0.4, max_tokens=300, request_type="image")
def _ensure_image_dir(self):
"""确保图像存储目录存在"""