Compare commits

16 Commits: 6081eeafea ... a4f092dbe1

Commits in this range (SHA1):
a4f092dbe1
29b979a04b
c059c7a2f1
5b98038425
67e33011ef
125c283d65
b8bbd7228f
5f2bf2f8f4
fa8555aeb7
5ecfb01552
06a45b3639
63cb81aab6
2e7b434537
1dfa44b32b
43e25378c8
53bb77686b
32  .gitea/workflows/build.yaml  Normal file

@@ -0,0 +1,32 @@
+name: Build and Push Docker Image
+
+on:
+  push:
+    branches:
+      - dev
+      - gitea
+
+jobs:
+  build-and-push:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Login to Docker Registry
+        uses: docker/login-action@v3
+        with:
+          registry: docker.gardel.top
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Build and Push Docker Image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./Dockerfile
+          push: true
+          tags: docker.gardel.top/gardel/mofox:dev
+          build-args: |
+            BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
+            VCS_REF=${{ github.sha }}
4  .github/copilot-instructions.md  vendored

@@ -157,7 +157,7 @@ python __main__.py # 备用入口
 **调试技巧**:
 - 检查 `logs/app_*.jsonl` 结构化日志
 - 使用 `get_errors()` 工具查看编译错误
-- 数据库问题:查看 `data/MaiBot.db`(SQLite)或 MySQL 连接
+- 数据库问题:查看 `data/MaiBot.db`(SQLite)或 PostgreSQL 连接

 ## 📋 关键约定与模式

@@ -165,7 +165,7 @@ python __main__.py # 备用入口
 **全局配置**: `src/config/config.py` 的 `global_config` 单例
 - 通过 TOML 文件驱动(`config/bot_config.toml`)
 - 支持环境变量覆盖(`.env`)
-- 数据库类型切换:`database.database_type = "sqlite" | "mysql"`
+- 数据库类型切换:`database.database_type = "sqlite" | "postgresql"`

 ### 事件系统
 **Event Manager** (`src/plugin_system/core/event_manager.py`):
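For orientation, a minimal `[database]` section of `config/bot_config.toml` under the new scheme might look like the sketch below. Only `database_type`, `sqlite_path`, `postgresql_host`, `postgresql_port`, and `postgresql_schema` are confirmed by this diff; the layout and the example values are assumptions, not a copy of the shipped template.

    # Hypothetical excerpt of config/bot_config.toml (layout assumed)
    [database]
    database_type = "sqlite"          # or "postgresql"
    sqlite_path = "data/MaiBot.db"    # used when database_type = "sqlite"
    postgresql_host = "localhost"     # used when database_type = "postgresql"
    postgresql_port = 5432
    postgresql_schema = "public"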
149  .github/workflows/docker-image.yml  vendored  (deleted)

@@ -1,149 +0,0 @@
-name: Docker Build and Push
-
-on:
-  push:
-    branches:
-      - master
-      - dev
-    tags:
-      - "v*.*.*"
-      - "v*"
-      - "*.*.*"
-      - "*.*.*-*"
-  workflow_dispatch: # 允许手动触发工作流
-
-# Workflow's jobs
-jobs:
-  build-amd64:
-    name: Build AMD64 Image
-    runs-on: ubuntu-24.04
-    outputs:
-      digest: ${{ steps.build.outputs.digest }}
-    steps:
-      - name: Check out git repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          buildkitd-flags: --debug
-
-      # Log in docker hub
-      - name: Log in to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      # Generate metadata for Docker images
-      - name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: ${{ secrets.DOCKERHUB_USERNAME }}/mofox
-
-      # Build and push AMD64 image by digest
-      - name: Build and push AMD64
-        id: build
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          platforms: linux/amd64
-          labels: ${{ steps.meta.outputs.labels }}
-          file: ./Dockerfile
-          cache-from: type=registry,ref=${{ secrets.DOCKERHUB_USERNAME }}/mofox:amd64-buildcache
-          cache-to: type=registry,ref=${{ secrets.DOCKERHUB_USERNAME }}/mofox:amd64-buildcache,mode=max
-          outputs: type=image,name=${{ secrets.DOCKERHUB_USERNAME }}/mofox,push-by-digest=true,name-canonical=true,push=true
-          build-args: |
-            BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
-            VCS_REF=${{ github.sha }}
-
-  build-arm64:
-    name: Build ARM64 Image
-    runs-on: ubuntu-24.04-arm
-    outputs:
-      digest: ${{ steps.build.outputs.digest }}
-    steps:
-      - name: Check out git repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-        with:
-          buildkitd-flags: --debug
-
-      # Log in docker hub
-      - name: Log in to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      # Generate metadata for Docker images
-      - name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: ${{ secrets.DOCKERHUB_USERNAME }}/mofox
-
-      # Build and push ARM64 image by digest
-      - name: Build and push ARM64
-        id: build
-        uses: docker/build-push-action@v5
-        with:
-          context: .
-          platforms: linux/arm64/v8
-          labels: ${{ steps.meta.outputs.labels }}
-          file: ./Dockerfile
-          cache-from: type=registry,ref=${{ secrets.DOCKERHUB_USERNAME }}/mofox:arm64-buildcache
-          cache-to: type=registry,ref=${{ secrets.DOCKERHUB_USERNAME }}/mofox:arm64-buildcache,mode=max
-          outputs: type=image,name=${{ secrets.DOCKERHUB_USERNAME }}/mofox,push-by-digest=true,name-canonical=true,push=true
-          build-args: |
-            BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
-            VCS_REF=${{ github.sha }}
-
-  create-manifest:
-    name: Create Multi-Arch Manifest
-    runs-on: ubuntu-24.04
-    needs:
-      - build-amd64
-      - build-arm64
-    steps:
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      # Log in docker hub
-      - name: Log in to Docker Hub
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      # Generate metadata for Docker images
-      - name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: ${{ secrets.DOCKERHUB_USERNAME }}/mofox
-          tags: |
-            type=ref,event=branch
-            type=ref,event=tag
-            type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
-            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}.{{minor}}
-            type=semver,pattern={{major}}
-            type=sha,prefix=${{ github.ref_name }}-,enable=${{ github.ref_type == 'branch' }}
-
-      - name: Create and Push Manifest
-        run: |
-          # 为每个标签创建多架构镜像
-          for tag in $(echo "${{ steps.meta.outputs.tags }}" | tr '\n' ' '); do
-            echo "Creating manifest for $tag"
-            docker buildx imagetools create -t $tag \
-              ${{ secrets.DOCKERHUB_USERNAME }}/mofox@${{ needs.build-amd64.outputs.digest }} \
-              ${{ secrets.DOCKERHUB_USERNAME }}/mofox@${{ needs.build-arm64.outputs.digest }}
-          done
@@ -75,7 +75,7 @@
 ### 🚀 拓展功能

 - 🧠 **AFC 智能对话** - 基于亲和力流,实现兴趣感知和动态关系构建
-- 🔄 **数据库切换** - 支持 SQLite 与 MySQL 自由切换,采用 SQLAlchemy 2.0 重新构建
+- 🔄 **数据库切换** - 支持 SQLite 与 PostgreSQL 自由切换,采用 SQLAlchemy 2.0 重新构建
 - 🛡️ **反注入集成** - 内置一整套回复前注入过滤系统,为人格保驾护航
 - 🎥 **视频分析** - 支持多种视频识别模式,拓展原版视觉
 - 📅 **日程系统** - 让MoFox规划每一天

@@ -109,7 +109,7 @@
 | 服务 | 描述 |
 | ------------ | ------------------------------------------ |
 | 🤖 QQ 协议端 | [NapCatQQ](https://github.com/NapNeko/NapCatQQ) 或其他兼容协议端 |
-| 🗃️ 数据库 | SQLite(默认)或 MySQL(可选) |
+| 🗃️ 数据库 | SQLite(默认)或 PostgreSQL(可选) |
 | 🔧 管理工具 | Chat2DB(可选,用于数据库可视化管理) |

 ---

@@ -133,7 +133,7 @@

 1. 📝 **核心配置**:编辑 `config/bot_config.toml`,设置 LLM API Key、Bot 名称等基础参数。
 2. 🤖 **协议端配置**:确保使用 [NapCatQQ](https://github.com/NapNeko/NapCatQQ) 或兼容协议端,建立稳定通信。
-3. 🗃️ **数据库配置**:选择 SQLite(默认)或配置 MySQL 数据库连接。
+3. 🗃️ **数据库配置**:选择 SQLite(默认)或配置 PostgreSQL 数据库连接。
 4. 🔌 **插件配置**:在 `config/plugins/` 目录中启用或配置所需插件。

 </div>
2  bot.py

@@ -21,7 +21,7 @@ logger = get_logger("main")
 install(extra_lines=3)

 # 常量定义
-SUPPORTED_DATABASES = ["sqlite", "mysql", "postgresql"]
+SUPPORTED_DATABASES = ["sqlite", "postgresql"]
 SHUTDOWN_TIMEOUT = 10.0
 EULA_CHECK_INTERVAL = 2
 MAX_EULA_CHECK_ATTEMPTS = 30
@@ -32,6 +32,7 @@ dependencies = [
     "lxml>=6.0.0",
     "matplotlib>=3.10.3",
     "networkx>=3.4.2",
+    "objgraph>=3.6.2",
     "orjson>=3.10",
     "numpy>=2.2.6",
     "openai>=2.5.0",

@@ -42,11 +43,11 @@ dependencies = [
     "pillow>=12.0.0",
     "pip-check-reqs>=2.5.5",
     "psutil>=7.0.0",
+    "pympler>=1.1",
     "pyarrow>=21.0.0",
     "pydantic>=2.12.3",
     "pygments>=2.19.2",
     "pymongo>=4.13.2",
-    "pymysql>=1.1.1",
     "pypinyin>=0.54.0",
     "PyYAML>=6.0",
     "python-dateutil>=2.9.0.post0",

@@ -74,15 +75,13 @@ dependencies = [
     "uvicorn>=0.35.0",
     "watchdog>=6.0.0",
     "websockets>=15.0.1",
-    "aiomysql>=0.2.0",
     "aiosqlite>=0.21.0",
     "inkfox>=0.1.1",
     "rjieba>=0.1.13",
     "fastmcp>=2.13.0",
     "mofox-wire",
     "jinja2>=3.1.0",
-    "psycopg2-binary",
-    "PyMySQL"
+    "psycopg2-binary"
 ]

 [[tool.uv.index]]
@@ -1,10 +1,8 @@
 aiosqlite
 aiofiles
-aiomysql
 asyncpg
 psycopg[binary]
 psycopg2-binary
-PyMySQL
 APScheduler
 aiohttp
 aiohttp-cors
@@ -2,14 +2,12 @@
 """数据库迁移脚本

 支持在不同数据库之间迁移数据:
-- SQLite <-> MySQL
 - SQLite <-> PostgreSQL
-- MySQL <-> PostgreSQL

 使用方法:
     python scripts/migrate_database.py --help
     python scripts/migrate_database.py --source sqlite --target postgresql
-    python scripts/migrate_database.py --source mysql --target postgresql --batch-size 5000
+    python scripts/migrate_database.py --source postgresql --target sqlite --batch-size 5000

     # 交互式向导模式(推荐)
     python scripts/migrate_database.py

@@ -25,7 +23,7 @@
 实现细节:
 - 使用 SQLAlchemy 进行数据库连接和元数据管理
 - 采用流式迁移,避免一次性加载过多数据
-- 支持 SQLite、MySQL、PostgreSQL 之间的互相迁移
+- 支持 SQLite、PostgreSQL 之间的互相迁移
 - 批量插入失败时自动降级为逐行插入,最大程度保留数据
 """

@@ -124,7 +122,7 @@ def get_database_config_from_toml(db_type: str) -> dict | None:
     """从 bot_config.toml 中读取数据库配置

     Args:
-        db_type: 数据库类型,支持 "sqlite"、"mysql"、"postgresql"
+        db_type: 数据库类型,支持 "sqlite"、"postgresql"

     Returns:
         dict: 数据库配置字典,如果对应配置不存在则返回 None

@@ -148,28 +146,6 @@ def get_database_config_from_toml(db_type: str) -> dict | None:
             sqlite_path = os.path.join(PROJECT_ROOT, sqlite_path)
         return {"path": sqlite_path}

-    elif db_type == "mysql":
-        return {
-            "host": db_config.get("mysql_host")
-            or config_data.get("mysql_host")
-            or "localhost",
-            "port": db_config.get("mysql_port")
-            or config_data.get("mysql_port")
-            or 3306,
-            "database": db_config.get("mysql_database")
-            or config_data.get("mysql_database")
-            or "maibot",
-            "user": db_config.get("mysql_user")
-            or config_data.get("mysql_user")
-            or "root",
-            "password": db_config.get("mysql_password")
-            or config_data.get("mysql_password")
-            or "",
-            "charset": db_config.get("mysql_charset")
-            or config_data.get("mysql_charset")
-            or "utf8mb4",
-        }
-
     elif db_type == "postgresql":
         return {
             "host": db_config.get("postgresql_host")

@@ -257,7 +233,7 @@ def create_engine_by_type(db_type: str, config: dict) -> Engine:
     """根据数据库类型创建对应的 SQLAlchemy Engine

     Args:
-        db_type: 数据库类型,支持 sqlite/mysql/postgresql
+        db_type: 数据库类型,支持 sqlite/postgresql
         config: 配置字典

     Returns:

@@ -266,15 +242,6 @@ def create_engine_by_type(db_type: str, config: dict) -> Engine:
     db_type = db_type.lower()
     if db_type == "sqlite":
         return create_sqlite_engine(config["path"])
-    elif db_type == "mysql":
-        return create_mysql_engine(
-            host=config["host"],
-            port=config["port"],
-            database=config["database"],
-            user=config["user"],
-            password=config["password"],
-            charset=config.get("charset", "utf8mb4"),
-        )
     elif db_type == "postgresql":
         return create_postgresql_engine(
             host=config["host"],

@@ -512,7 +479,7 @@ def migrate_table_data(
         source_table: 源表对象
         target_table: 目标表对象
         batch_size: 每批次处理大小
-        target_dialect: 目标数据库方言 (sqlite/mysql/postgresql)
+        target_dialect: 目标数据库方言 (sqlite/postgresql)
         row_limit: 最大迁移行数限制,None 表示不限制

     Returns:

@@ -738,7 +705,7 @@ class DatabaseMigrator:

     def _validate_database_types(self):
         """验证数据库类型"""
-        supported_types = {"sqlite", "mysql", "postgresql"}
+        supported_types = {"sqlite", "postgresql"}
         if self.source_type not in supported_types:
             raise ValueError(f"不支持的源数据库类型: {self.source_type}")
         if self.target_type not in supported_types:

@@ -995,7 +962,7 @@ class DatabaseMigrator:
 def parse_args():
     """解析命令行参数"""
     parser = argparse.ArgumentParser(
-        description="数据库迁移工具 - 在 SQLite、MySQL、PostgreSQL 之间迁移数据",
+        description="数据库迁移工具 - 在 SQLite、PostgreSQL 之间迁移数据",
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog="""示例:
     # 从 SQLite 迁移到 PostgreSQL

@@ -1008,15 +975,16 @@ def parse_args():
         --target-user postgres \
         --target-password your_password

-    # 从 SQLite 迁移到 MySQL
+    # 从 PostgreSQL 迁移到 SQLite
     python scripts/migrate_database.py \
-        --source sqlite \
-        --target mysql \
-        --target-host localhost \
-        --target-port 3306 \
-        --target-database maibot \
-        --target-user root \
-        --target-password your_password
+        --source postgresql \
+        --source-host localhost \
+        --source-port 5432 \
+        --source-database maibot \
+        --source-user postgres \
+        --source-password your_password \
+        --target sqlite \
+        --target-path data/MaiBot_backup.db

     # 使用交互式向导模式(推荐)
     python scripts/migrate_database.py

@@ -1028,13 +996,13 @@ def parse_args():
     parser.add_argument(
         "--source",
         type=str,
-        choices=["sqlite", "mysql", "postgresql"],
+        choices=["sqlite", "postgresql"],
         help="源数据库类型(不指定时,在交互模式中选择)",
     )
     parser.add_argument(
         "--target",
         type=str,
-        choices=["sqlite", "mysql", "postgresql"],
+        choices=["sqlite", "postgresql"],
         help="目标数据库类型(不指定时,在交互模式中选择)",
     )
     parser.add_argument(

@@ -1053,8 +1021,8 @@ def parse_args():
     # 源数据库参数(可选,默认从 bot_config.toml 读取)
     source_group = parser.add_argument_group("源数据库配置(可选,默认从 bot_config.toml 读取)")
     source_group.add_argument("--source-path", type=str, help="SQLite 数据库路径")
-    source_group.add_argument("--source-host", type=str, help="MySQL/PostgreSQL 主机")
-    source_group.add_argument("--source-port", type=int, help="MySQL/PostgreSQL 端口")
+    source_group.add_argument("--source-host", type=str, help="PostgreSQL 主机")
+    source_group.add_argument("--source-port", type=int, help="PostgreSQL 端口")
     source_group.add_argument("--source-database", type=str, help="数据库名")
     source_group.add_argument("--source-user", type=str, help="用户名")
     source_group.add_argument("--source-password", type=str, help="密码")

@@ -1062,13 +1030,12 @@ def parse_args():
     # 目标数据库参数
     target_group = parser.add_argument_group("目标数据库配置")
     target_group.add_argument("--target-path", type=str, help="SQLite 数据库路径")
-    target_group.add_argument("--target-host", type=str, help="MySQL/PostgreSQL 主机")
-    target_group.add_argument("--target-port", type=int, help="MySQL/PostgreSQL 端口")
+    target_group.add_argument("--target-host", type=str, help="PostgreSQL 主机")
+    target_group.add_argument("--target-port", type=int, help="PostgreSQL 端口")
     target_group.add_argument("--target-database", type=str, help="数据库名")
     target_group.add_argument("--target-user", type=str, help="用户名")
     target_group.add_argument("--target-password", type=str, help="密码")
     target_group.add_argument("--target-schema", type=str, default="public", help="PostgreSQL schema")
-    target_group.add_argument("--target-charset", type=str, default="utf8mb4", help="MySQL 字符集")

     # 跳过表参数
     parser.add_argument(

@@ -1113,24 +1080,20 @@ def build_config_from_args(args, prefix: str, db_type: str) -> dict | None:
             return {"path": path}
         return None

-    elif db_type in ("mysql", "postgresql"):
+    elif db_type == "postgresql":
         host = getattr(args, f"{prefix}_host", None)
         if not host:
             return None

         config = {
             "host": host,
-            "port": getattr(args, f"{prefix}_port") or (3306 if db_type == "mysql" else 5432),
+            "port": getattr(args, f"{prefix}_port") or 5432,
             "database": getattr(args, f"{prefix}_database") or "maibot",
-            "user": getattr(args, f"{prefix}_user") or ("root" if db_type == "mysql" else "postgres"),
+            "user": getattr(args, f"{prefix}_user") or "postgres",
             "password": getattr(args, f"{prefix}_password") or "",
+            "schema": getattr(args, f"{prefix}_schema", "public"),
         }

-        if db_type == "mysql":
-            config["charset"] = getattr(args, f"{prefix}_charset", "utf8mb4")
-        elif db_type == "postgresql":
-            config["schema"] = getattr(args, f"{prefix}_schema", "public")
-
         return config

     return None

@@ -1201,14 +1164,14 @@ def interactive_setup() -> dict:
     print("只需回答几个问题,我会帮你构造迁移配置。")
     print("=" * 60)

-    db_types = ["sqlite", "mysql", "postgresql"]
+    db_types = ["sqlite", "postgresql"]

     # 选择源数据库
     source_type = _ask_choice("请选择【源数据库类型】:", db_types, default_index=0)

     # 选择目标数据库(不能与源相同)
     while True:
-        default_idx = 2 if len(db_types) >= 3 else 0
+        default_idx = 1 if len(db_types) >= 2 else 0
         target_type = _ask_choice("请选择【目标数据库类型】:", db_types, default_index=default_idx)
         if target_type != source_type:
             break

@@ -1231,8 +1194,8 @@ def interactive_setup() -> dict:
         source_path = _ask_str("源 SQLite 文件路径", default="data/MaiBot.db")
         source_config = {"path": source_path}
     else:
-        port_default = 3306 if source_type == "mysql" else 5432
-        user_default = "root" if source_type == "mysql" else "postgres"
+        port_default = 5432
+        user_default = "postgres"
         host = _ask_str("源数据库 host", default="localhost")
         port = _ask_int("源数据库 port", default=port_default)
         database = _ask_str("源数据库名", default="maibot")

@@ -1245,9 +1208,7 @@ def interactive_setup() -> dict:
             "user": user,
             "password": password,
         }
-        if source_type == "mysql":
-            source_config["charset"] = _ask_str("源数据库字符集", default="utf8mb4")
-        elif source_type == "postgresql":
+        if source_type == "postgresql":
             source_config["schema"] = _ask_str("源数据库 schema", default="public")

     # 目标数据库配置(必须显式确认)

@@ -1260,8 +1221,8 @@ def interactive_setup() -> dict:
         )
         target_config = {"path": target_path}
     else:
-        port_default = 3306 if target_type == "mysql" else 5432
-        user_default = "root" if target_type == "mysql" else "postgres"
+        port_default = 5432
+        user_default = "postgres"
         host = _ask_str("目标数据库 host", default="localhost")
         port = _ask_int("目标数据库 port", default=port_default)
         database = _ask_str("目标数据库名", default="maibot")

@@ -1275,9 +1236,7 @@ def interactive_setup() -> dict:
             "user": user,
             "password": password,
         }
-        if target_type == "mysql":
-            target_config["charset"] = _ask_str("目标数据库字符集", default="utf8mb4")
-        elif target_type == "postgresql":
+        if target_type == "postgresql":
             target_config["schema"] = _ask_str("目标数据库 schema", default="public")

     print()
@@ -1,14 +1,25 @@
 """
 消息管理器模块
 提供统一的消息管理、上下文管理和流循环调度功能

+基于 Generator + Tick 的事件驱动模式
 """

-from .distribution_manager import StreamLoopManager, stream_loop_manager
+from .distribution_manager import (
+    ConversationTick,
+    StreamLoopManager,
+    conversation_loop,
+    run_chat_stream,
+    stream_loop_manager,
+)
 from .message_manager import MessageManager, message_manager

 __all__ = [
+    "ConversationTick",
     "MessageManager",
     "StreamLoopManager",
+    "conversation_loop",
     "message_manager",
+    "run_chat_stream",
     "stream_loop_manager",
 ]
@@ -234,13 +234,6 @@ class BatchDatabaseWriter:

                 stmt = sqlite_insert(ChatStreams).values(stream_id=stream_id, **update_data)
                 stmt = stmt.on_conflict_do_update(index_elements=["stream_id"], set_=update_data)
-            elif global_config.database.database_type == "mysql":
-                from sqlalchemy.dialects.mysql import insert as mysql_insert
-
-                stmt = mysql_insert(ChatStreams).values(stream_id=stream_id, **update_data)
-                stmt = stmt.on_duplicate_key_update(
-                    **{key: value for key, value in update_data.items() if key != "stream_id"}
-                )
             elif global_config.database.database_type == "postgresql":
                 from sqlalchemy.dialects.postgresql import insert as pg_insert

@@ -268,13 +261,6 @@ class BatchDatabaseWriter:

                 stmt = sqlite_insert(ChatStreams).values(stream_id=stream_id, **update_data)
                 stmt = stmt.on_conflict_do_update(index_elements=["stream_id"], set_=update_data)
-            elif global_config.database.database_type == "mysql":
-                from sqlalchemy.dialects.mysql import insert as mysql_insert
-
-                stmt = mysql_insert(ChatStreams).values(stream_id=stream_id, **update_data)
-                stmt = stmt.on_duplicate_key_update(
-                    **{key: value for key, value in update_data.items() if key != "stream_id"}
-                )
             elif global_config.database.database_type == "postgresql":
                 from sqlalchemy.dialects.postgresql import insert as pg_insert

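For context on what remains after the MySQL branch is dropped: both surviving branches use SQLAlchemy's dialect-specific `insert(...).on_conflict_do_update(...)`. The sketch below illustrates the shared shape; the PostgreSQL `index_elements` argument is an assumption, since this diff cuts off before that branch's `on_conflict_do_update` call.

    # Hedged sketch of the remaining two-way upsert dispatch (not the repository's exact code)
    from sqlalchemy.dialects.postgresql import insert as pg_insert
    from sqlalchemy.dialects.sqlite import insert as sqlite_insert

    def build_upsert_stmt(db_type: str, table, stream_id: str, update_data: dict):
        if db_type == "sqlite":
            stmt = sqlite_insert(table).values(stream_id=stream_id, **update_data)
        elif db_type == "postgresql":
            stmt = pg_insert(table).values(stream_id=stream_id, **update_data)
        else:
            raise ValueError(f"unsupported database type: {db_type}")
        # Both dialects expose the same on_conflict_do_update API in SQLAlchemy 2.0
        return stmt.on_conflict_do_update(index_elements=["stream_id"], set_=update_data)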
File diff suppressed because it is too large.
@@ -3,7 +3,6 @@ import hashlib
 import time

 from rich.traceback import install
-from sqlalchemy.dialects.mysql import insert as mysql_insert
 from sqlalchemy.dialects.postgresql import insert as pg_insert
 from sqlalchemy.dialects.sqlite import insert as sqlite_insert

@@ -665,11 +664,6 @@ class ChatManager:
                 if global_config.database.database_type == "sqlite":
                     stmt = sqlite_insert(ChatStreams).values(stream_id=s_data_dict["stream_id"], **fields_to_save)
                     stmt = stmt.on_conflict_do_update(index_elements=["stream_id"], set_=fields_to_save)
-                elif global_config.database.database_type == "mysql":
-                    stmt = mysql_insert(ChatStreams).values(stream_id=s_data_dict["stream_id"], **fields_to_save)
-                    stmt = stmt.on_duplicate_key_update(
-                        **{key: value for key, value in fields_to_save.items() if key != "stream_id"}
-                    )
                 elif global_config.database.database_type == "postgresql":
                     stmt = pg_insert(ChatStreams).values(stream_id=s_data_dict["stream_id"], **fields_to_save)
                     # PostgreSQL 需要使用 constraint 参数或正确的 index_elements
@@ -9,7 +9,6 @@

 支持的数据库:
 - SQLite (默认)
-- MySQL
 - PostgreSQL
 """

@@ -2,7 +2,6 @@

 提供跨数据库兼容性支持,处理不同数据库之间的差异:
 - SQLite: 轻量级本地数据库
-- MySQL: 高性能关系型数据库
 - PostgreSQL: 功能丰富的开源数据库

 主要职责:

@@ -23,7 +22,6 @@ class DatabaseDialect(Enum):
     """数据库方言枚举"""

     SQLITE = "sqlite"
-    MYSQL = "mysql"
     POSTGRESQL = "postgresql"


@@ -68,20 +66,6 @@ DIALECT_CONFIGS: dict[DatabaseDialect, DialectConfig] = {
             }
         },
     ),
-    DatabaseDialect.MYSQL: DialectConfig(
-        dialect=DatabaseDialect.MYSQL,
-        ping_query="SELECT 1",
-        supports_returning=False,  # MySQL 8.0.21+ 有限支持
-        supports_native_json=True,  # MySQL 5.7+
-        supports_arrays=False,
-        requires_length_for_index=True,  # MySQL 索引需要指定长度
-        default_string_length=255,
-        isolation_level="READ COMMITTED",
-        engine_kwargs={
-            "pool_pre_ping": True,
-            "pool_recycle": 3600,
-        },
-    ),
     DatabaseDialect.POSTGRESQL: DialectConfig(
         dialect=DatabaseDialect.POSTGRESQL,
         ping_query="SELECT 1",

@@ -113,13 +97,13 @@ class DialectAdapter:
         """初始化适配器

         Args:
-            db_type: 数据库类型字符串 ("sqlite", "mysql", "postgresql")
+            db_type: 数据库类型字符串 ("sqlite", "postgresql")
         """
         try:
             cls._current_dialect = DatabaseDialect(db_type.lower())
             cls._config = DIALECT_CONFIGS[cls._current_dialect]
         except ValueError:
-            raise ValueError(f"不支持的数据库类型: {db_type},支持的类型: sqlite, mysql, postgresql")
+            raise ValueError(f"不支持的数据库类型: {db_type},支持的类型: sqlite, postgresql")

     @classmethod
     def get_dialect(cls) -> DatabaseDialect:

@@ -153,15 +137,10 @@ class DialectAdapter:
         """
         config = cls.get_config()

-        # MySQL 索引列需要指定长度
-        if config.requires_length_for_index and indexed:
-            return String(max_length)
-
         # SQLite 和 PostgreSQL 可以使用 Text
         if config.dialect in (DatabaseDialect.SQLITE, DatabaseDialect.POSTGRESQL):
             return Text() if not indexed else String(max_length)

-        # MySQL 使用 VARCHAR
         return String(max_length)

     @classmethod

@@ -189,11 +168,6 @@ class DialectAdapter:
         """是否为 SQLite"""
         return cls.get_dialect() == DatabaseDialect.SQLITE

-    @classmethod
-    def is_mysql(cls) -> bool:
-        """是否为 MySQL"""
-        return cls.get_dialect() == DatabaseDialect.MYSQL
-
     @classmethod
     def is_postgresql(cls) -> bool:
         """是否为 PostgreSQL"""

@@ -211,7 +185,7 @@ def get_indexed_string_field(max_length: int = 255) -> TypeEngine:
     这是一个便捷函数,用于在模型定义中获取适合当前数据库的字符串类型

     Args:
-        max_length: 最大长度(对于 MySQL 是必需的)
+        max_length: 最大长度

     Returns:
         SQLAlchemy 类型
@@ -4,7 +4,6 @@

 支持的数据库类型:
 - SQLite: 轻量级本地数据库,使用 aiosqlite 驱动
-- MySQL: 高性能关系型数据库,使用 aiomysql 驱动
 - PostgreSQL: 功能丰富的开源数据库,使用 asyncpg 驱动
 """

@@ -66,9 +65,7 @@ async def get_engine() -> AsyncEngine:
     logger.info(f"正在初始化 {db_type.upper()} 数据库引擎...")

     # 根据数据库类型构建URL和引擎参数
-    if db_type == "mysql":
-        url, engine_kwargs = _build_mysql_config(config)
-    elif db_type == "postgresql":
+    if db_type == "postgresql":
         url, engine_kwargs = _build_postgresql_config(config)
     else:
         url, engine_kwargs = _build_sqlite_config(config)

@@ -123,55 +120,6 @@ def _build_sqlite_config(config) -> tuple[str, dict]:
     return url, engine_kwargs


-def _build_mysql_config(config) -> tuple[str, dict]:
-    """构建 MySQL 配置
-
-    Args:
-        config: 数据库配置对象
-
-    Returns:
-        (url, engine_kwargs) 元组
-    """
-    encoded_user = quote_plus(config.mysql_user)
-    encoded_password = quote_plus(config.mysql_password)
-
-    if config.mysql_unix_socket:
-        # Unix socket连接
-        encoded_socket = quote_plus(config.mysql_unix_socket)
-        url = (
-            f"mysql+aiomysql://{encoded_user}:{encoded_password}"
-            f"@/{config.mysql_database}"
-            f"?unix_socket={encoded_socket}&charset={config.mysql_charset}"
-        )
-    else:
-        # TCP连接
-        url = (
-            f"mysql+aiomysql://{encoded_user}:{encoded_password}"
-            f"@{config.mysql_host}:{config.mysql_port}/{config.mysql_database}"
-            f"?charset={config.mysql_charset}"
-        )
-
-    engine_kwargs = {
-        "echo": False,
-        "future": True,
-        "pool_size": config.connection_pool_size,
-        "max_overflow": config.connection_pool_size * 2,
-        "pool_timeout": config.connection_timeout,
-        "pool_recycle": 3600,
-        "pool_pre_ping": True,
-        "connect_args": {
-            "autocommit": config.mysql_autocommit,
-            "charset": config.mysql_charset,
-            "connect_timeout": config.connection_timeout,
-        },
-    }
-
-    logger.info(
-        f"MySQL配置: {config.mysql_user}@{config.mysql_host}:{config.mysql_port}/{config.mysql_database}"
-    )
-    return url, engine_kwargs
-
-
 def _build_postgresql_config(config) -> tuple[str, dict]:
     """构建 PostgreSQL 配置

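As background for the surviving `_build_postgresql_config` (its body is not part of this diff), SQLAlchemy async engine URLs for the two remaining backends take roughly the form sketched here; this helper is illustrative only and does not reproduce the repository's implementation.

    # Illustrative URL shapes only (assumed, not copied from the project)
    from urllib.parse import quote_plus

    def sketch_async_db_url(db_type: str, **cfg) -> str:
        if db_type == "sqlite":
            return f"sqlite+aiosqlite:///{cfg['path']}"
        # asyncpg driver, matching the docstring above
        return (
            f"postgresql+asyncpg://{quote_plus(cfg['user'])}:{quote_plus(cfg['password'])}"
            f"@{cfg['host']}:{cfg['port']}/{cfg['database']}"
        )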
@@ -119,9 +119,6 @@ async def check_and_migrate_database(existing_engine=None):
                 ):
                     # SQLite 将布尔值存储为 0 或 1
                     default_value = "1" if default_arg else "0"
-                elif dialect.name == "mysql" and isinstance(default_arg, bool):
-                    # MySQL 也使用 1/0 表示布尔值
-                    default_value = "1" if default_arg else "0"
                 elif isinstance(default_arg, bool):
                     # PostgreSQL 使用 TRUE/FALSE
                     default_value = "TRUE" if default_arg else "FALSE"
@@ -5,7 +5,6 @@

 支持的数据库类型:
 - SQLite: 使用 Text 类型
-- MySQL: 使用 VARCHAR(max_length) 用于索引字段
 - PostgreSQL: 使用 Text 类型(PostgreSQL 的 Text 类型性能与 VARCHAR 相当)

 所有模型使用统一的类型注解风格:

@@ -31,12 +30,11 @@ def get_string_field(max_length=255, **kwargs):
     根据数据库类型返回合适的字符串字段类型

     对于需要索引的字段:
-    - MySQL: 必须使用 VARCHAR(max_length),因为索引需要指定长度
     - PostgreSQL: 可以使用 Text,但为了兼容性使用 VARCHAR
     - SQLite: 可以使用 Text,无长度限制

     Args:
-        max_length: 最大长度(对于 MySQL 是必需的)
+        max_length: 最大长度
         **kwargs: 传递给 String/Text 的额外参数

     Returns:

@@ -47,11 +45,8 @@ def get_string_field(max_length=255, **kwargs):
     assert global_config is not None
     db_type = global_config.database.database_type

-    # MySQL 索引需要指定长度的 VARCHAR
-    if db_type == "mysql":
-        return String(max_length, **kwargs)
     # PostgreSQL 可以使用 Text,但为了跨数据库迁移兼容性,使用 VARCHAR
-    elif db_type == "postgresql":
+    if db_type == "postgresql":
         return String(max_length, **kwargs)
     # SQLite 使用 Text(无长度限制)
     else:
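A short usage sketch for `get_string_field`; the model and column names below are invented for illustration and are not part of this diff.

    # Hypothetical model excerpt (names invented)
    from sqlalchemy.orm import Mapped, mapped_column

    class ExampleStream(Base):  # Base: the project's declarative base (assumed)
        __tablename__ = "example_streams"

        id: Mapped[int] = mapped_column(primary_key=True)
        # VARCHAR(64) on PostgreSQL, Text on SQLite, per get_string_field above
        stream_id: Mapped[str] = mapped_column(get_string_field(64), index=True)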
@@ -4,7 +4,6 @@

 支持的数据库类型:
 - SQLite: 设置 PRAGMA 参数优化并发
-- MySQL: 无特殊会话设置
 - PostgreSQL: 可选设置 schema 搜索路径
 """

@@ -79,7 +78,6 @@ async def _apply_session_settings(session: AsyncSession, db_type: str) -> None:
             schema = global_config.database.postgresql_schema
             if schema and schema != "public":
                 await session.execute(text(f"SET search_path TO {schema}"))
-        # MySQL 通常不需要会话级别的特殊设置
     except Exception:
         # 复用连接时设置可能已存在,忽略错误
         pass

@@ -93,7 +91,6 @@ async def get_db_session() -> AsyncGenerator[AsyncSession, None]:

     支持的数据库:
     - SQLite: 自动设置 busy_timeout 和外键约束
-    - MySQL: 直接使用,无特殊设置
     - PostgreSQL: 支持自定义 schema

     使用示例:

@@ -132,7 +129,7 @@ async def get_db_session_direct() -> AsyncGenerator[AsyncSession, None]:
     - 正常退出时自动提交事务
     - 发生异常时自动回滚事务
     - 如果用户代码已手动调用 commit/rollback,再次调用是安全的
-    - 适用于所有数据库类型(SQLite, MySQL, PostgreSQL)
+    - 适用于所有数据库类型(SQLite, PostgreSQL)

     Yields:
         AsyncSession: SQLAlchemy异步会话对象
@@ -128,7 +128,7 @@ class ConnectionPoolManager:
         - 正常退出时自动提交事务
         - 发生异常时自动回滚事务
         - 如果用户代码已手动调用 commit/rollback,再次调用是安全的(空操作)
-        - 支持所有数据库类型:SQLite、MySQL、PostgreSQL
+        - 支持所有数据库类型:SQLite、PostgreSQL
         """
         connection_info = None

@@ -158,7 +158,7 @@ class ConnectionPoolManager:
             yield connection_info.session

             # 🔧 正常退出时提交事务
-            # 这对所有数据库(SQLite、MySQL、PostgreSQL)都很重要
+            # 这对所有数据库(SQLite、PostgreSQL)都很重要
             # 因为 SQLAlchemy 默认使用事务模式,不会自动提交
             # 注意:如果用户代码已调用 commit(),这里的 commit() 是安全的空操作
             if connection_info and connection_info.session:
@@ -22,9 +22,24 @@ from logging.handlers import RotatingFileHandler
 from pathlib import Path
 from typing import TYPE_CHECKING

-import objgraph
 import psutil
-from pympler import muppy, summary
+
+# objgraph 是可选依赖,用于对象增长监控
+try:
+    import objgraph
+    OBJGRAPH_AVAILABLE = True
+except ImportError:
+    OBJGRAPH_AVAILABLE = False
+    objgraph = None  # type: ignore[assignment]
+
+# pympler 是可选依赖,用于类型内存分析
+try:
+    from pympler import muppy, summary
+    PYMPLER_AVAILABLE = True
+except ImportError:
+    PYMPLER_AVAILABLE = False
+    muppy = None
+    summary = None

 if TYPE_CHECKING:
     from psutil import Process

@@ -73,6 +88,12 @@ def _setup_mem_logger() -> logging.Logger:

 logger = _setup_mem_logger()

+# 启动时记录可选依赖的可用性
+if not OBJGRAPH_AVAILABLE:
+    logger.warning("objgraph 未安装,对象增长分析功能不可用 (pip install objgraph)")
+if not PYMPLER_AVAILABLE:
+    logger.warning("pympler 未安装,类型内存分析功能不可用 (pip install Pympler)")
+
 _process: "Process" = psutil.Process()
 _last_snapshot: tracemalloc.Snapshot | None = None
 _last_type_summary: list | None = None

@@ -153,6 +174,10 @@ def log_object_growth(limit: int = 20) -> None:
     Args:
         limit: 显示的最大增长类型数
     """
+    if not OBJGRAPH_AVAILABLE or objgraph is None:
+        logger.warning("objgraph not available, skipping object growth analysis")
+        return
+
     logger.info("==== Objgraph growth (top %s) ====", limit)
     try:
         # objgraph.show_growth 默认输出到 stdout,需要捕获输出

@@ -182,6 +207,10 @@ def log_type_memory_diff() -> None:
     """使用 Pympler 查看各类型对象占用的内存变化"""
     global _last_type_summary

+    if not PYMPLER_AVAILABLE or muppy is None or summary is None:
+        logger.warning("pympler not available, skipping type memory analysis")
+        return
+
     import io
     import sys

@@ -338,6 +367,10 @@ def debug_leak_for_type(type_name: str, max_depth: int = 5, filename: str | None
     Returns:
         是否成功生成引用图
     """
+    if not OBJGRAPH_AVAILABLE or objgraph is None:
+        logger.warning("objgraph not available, cannot generate backrefs graph")
+        return False
+
     if filename is None:
         filename = f"{type_name}_backrefs.png"

@@ -54,8 +54,12 @@ class Server:

         # 配置 CORS
         origins = [
-            "http://localhost:3000",  # 允许的前端源
+            "http://localhost:3000",
             "http://127.0.0.1:3000",
+            "http://localhost:11451",
+            "http://127.0.0.1:11451",
+            "http://localhost:3001",
+            "http://127.0.0.1:3001",
             # 在生产环境中,您应该添加实际的前端域名
         ]

@@ -65,7 +65,7 @@ TEMPLATE_DIR = os.path.join(PROJECT_ROOT, "template")

 # 考虑到,实际上配置文件中的mai_version是不会自动更新的,所以采用硬编码
 # 对该字段的更新,请严格参照语义化版本规范:https://semver.org/lang/zh-CN/
-MMC_VERSION = "0.13.0"
+MMC_VERSION = "0.13.1-alpha.1"

 # 全局配置变量
 _CONFIG_INITIALIZED = False
@@ -16,26 +16,9 @@ from src.config.config_base import ValidatedConfigBase
 class DatabaseConfig(ValidatedConfigBase):
     """数据库配置类"""

-    database_type: Literal["sqlite", "mysql", "postgresql"] = Field(default="sqlite", description="数据库类型")
+    database_type: Literal["sqlite", "postgresql"] = Field(default="sqlite", description="数据库类型")
     sqlite_path: str = Field(default="data/MaiBot.db", description="SQLite数据库文件路径")

-    # MySQL 配置
-    mysql_host: str = Field(default="localhost", description="MySQL服务器地址")
-    mysql_port: int = Field(default=3306, ge=1, le=65535, description="MySQL服务器端口")
-    mysql_database: str = Field(default="maibot", description="MySQL数据库名")
-    mysql_user: str = Field(default="root", description="MySQL用户名")
-    mysql_password: str = Field(default="", description="MySQL密码")
-    mysql_charset: str = Field(default="utf8mb4", description="MySQL字符集")
-    mysql_unix_socket: str = Field(default="", description="MySQL Unix套接字路径")
-    mysql_ssl_mode: Literal["DISABLED", "PREFERRED", "REQUIRED", "VERIFY_CA", "VERIFY_IDENTITY"] = Field(
-        default="DISABLED", description="SSL模式"
-    )
-    mysql_ssl_ca: str = Field(default="", description="SSL CA证书路径")
-    mysql_ssl_cert: str = Field(default="", description="SSL客户端证书路径")
-    mysql_ssl_key: str = Field(default="", description="SSL密钥路径")
-    mysql_autocommit: bool = Field(default=True, description="自动提交事务")
-    mysql_sql_mode: str = Field(default="TRADITIONAL", description="SQL模式")
-
     # PostgreSQL 配置
     postgresql_host: str = Field(default="localhost", description="PostgreSQL服务器地址")
     postgresql_port: int = Field(default=5432, ge=1, le=65535, description="PostgreSQL服务器端口")
@@ -187,8 +187,8 @@ class ShortTermMemoryManager:
     "importance": 0.7,
     "attributes": {{
         "time": "时间信息",
-        "attribute1": "其他属性1"
-        "attribute2": "其他属性2"
+        "attribute1": "其他属性1",
+        "attribute2": "其他属性2",
         ...
     }}
 }}
@@ -560,7 +560,7 @@ class ComponentRegistry:
             component_instance = router_class()
             server = get_global_server()
             # 生成唯一的 URL 前缀,格式为 /plugins/{plugin_name}
-            prefix = f"/plugins/{info.plugin_name}"
+            prefix = f"/plugins/{info.plugin_name}/{info.name}"
             # 将插件的路由包含到主应用中
             server.app.include_router(component_instance.router, prefix=prefix, tags=[info.plugin_name])

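To illustrate the effect of the prefix change: a router registered by a plugin is now mounted under `/plugins/{plugin_name}/{component_name}` instead of `/plugins/{plugin_name}`. The FastAPI sketch below uses invented plugin and component names.

    # Hypothetical illustration (plugin "weather", component "forecast")
    from fastapi import APIRouter, FastAPI

    app = FastAPI()
    router = APIRouter()

    @router.get("/status")
    def status() -> dict:
        return {"ok": True}

    # Old mount point: /plugins/weather/status
    # New mount point: /plugins/weather/forecast/status
    app.include_router(router, prefix="/plugins/weather/forecast", tags=["weather"])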
@@ -61,7 +61,6 @@ INSTALL_NAME_TO_IMPORT_NAME = {
     "passlib": "passlib",  # 密码哈希库
     "bcrypt": "bcrypt",  # Bcrypt密码哈希
     # ============== 数据库 (Database) ==============
-    "mysql-connector-python": "mysql.connector",  # MySQL官方驱动
     "psycopg2-binary": "psycopg2",  # PostgreSQL驱动 (二进制)
     "pymongo": "pymongo",  # MongoDB驱动
     "redis": "redis",  # Redis客户端
@@ -96,7 +96,6 @@ class ReplyAction(BaseAction):
             # 发送回复
             reply_text = await self._send_response(response_set)

-            logger.info(f"{self.log_prefix} reply 动作执行成功")
             return True, reply_text

         except asyncio.CancelledError:

@@ -219,7 +218,6 @@ class RespondAction(BaseAction):
             # 发送回复(respond 默认不引用)
             reply_text = await self._send_response(response_set)

-            logger.info(f"{self.log_prefix} respond 动作执行成功")
             return True, reply_text

         except asyncio.CancelledError:
@@ -126,7 +126,6 @@ class ChatStreamImpressionTool(BaseTool):
                 updates.append(f"兴趣分: {final_impression['stream_interest_score']:.2f}")

             result_text = f"已更新聊天流 {stream_id} 的印象:\n" + "\n".join(updates)
-            logger.info(f"聊天流印象更新成功: {stream_id}")

             return {"type": "chat_stream_impression_update", "id": stream_id, "content": result_text}

@@ -214,7 +213,7 @@ class ChatStreamImpressionTool(BaseTool):
                 await cache.delete(generate_cache_key("stream_impression", stream_id))
                 await cache.delete(generate_cache_key("chat_stream", stream_id))

-                logger.info(f"聊天流印象已更新到数据库: {stream_id}")
+                logger.debug(f"聊天流印象已更新到数据库: {stream_id}")
             else:
                 error_msg = f"聊天流 {stream_id} 不存在于数据库中,无法更新印象"
                 logger.error(error_msg)
@@ -48,14 +48,27 @@ class UserProfileTool(BaseTool):
    使用场景:
    1. TA告诉你个人信息(生日、职业、城市等)→ 填 key_info_type 和 key_info_value
    2. TA的信息有变化(搬家、换工作等)→ 会自动更新旧信息
-    3. 你对TA有了新的认识或感受
-    4. 想更新对TA的印象
+    3. 你对TA有了新的认识或感受 → 填 impression_hint
+    4. 想记录TA真正的兴趣爱好 → 填 preference

-    ⚠️ 重要注意:
-    - 别名必须是TA自己明确表示想被这样称呼的(如"你叫我xx吧"、"我的昵称是xx")
-    - 短期的撤娇/玩笑称呼不是别名(如"哈哈我是小笨蛋"这种玩笑不算)
-    - 关键信息必须是具体值(如"11月23日"),不要填描述性文字
-    - 游戏剧情/故事不是TA本人的信息
+    ## ⛔ 别名(alias)规则:
+    - 只填TA明确要求被称呼的真实昵称
+    - 必须是TA主动说"叫我xxx"或"我的昵称是xxx"
+    - 聊天中的玩笑称呼、撒娇称呼、临时戏称一律不填
+    - 你给TA起的爱称不算别名

+    ## ⛔ 偏好(preference)规则:
+    - 只填可以作为兴趣爱好的名词(如:编程、摄影、音乐、游戏)
+    - 必须是TA在现实中真正从事或喜欢的活动/领域
+    - 聊天互动方式不是爱好(撒娇、亲亲、被夸奖等不填)
+    - 你们之间的私密互动不是爱好
+    - 情感状态不是爱好
+
+    ## ⛔ 关键信息(key_info)规则:
+    - 只填客观可验证的事实信息
+    - 必须是具体的值(日期、地点、职业名称)
+    - 你的主观感受不是TA的信息
+    - 关系描述不是信息
+
    此工具在后台异步执行,不影响回复速度。"""
    parameters = [
@@ -88,6 +88,93 @@ class NapcatAdapter(BaseAdapter):
         # 注册 utils 内部使用的适配器实例,便于工具方法自动获取 WS
         handler_utils.register_adapter(self)

+    def _should_process_event(self, raw: Dict[str, Any]) -> bool:
+        """
+        检查事件是否应该被处理(黑白名单过滤)
+
+        此方法在 from_platform_message 顶层调用,对所有类型的事件(消息、通知、元事件)进行过滤。
+
+        Args:
+            raw: OneBot 原始事件数据
+
+        Returns:
+            bool: True表示应该处理,False表示应该过滤
+        """
+        if not self.plugin:
+            return True
+
+        plugin_config = self.plugin.config
+        if not plugin_config:
+            return True  # 如果没有配置,默认处理所有事件
+
+        features_config = plugin_config.get("features", {})
+        post_type = raw.get("post_type")
+
+        # 获取用户信息(根据事件类型从不同字段获取)
+        user_id: str = ""
+        if post_type == "message":
+            sender_info = raw.get("sender", {})
+            user_id = str(sender_info.get("user_id", ""))
+        elif post_type == "notice":
+            user_id = str(raw.get("user_id", ""))
+        else:
+            # 元事件或其他类型不需要过滤
+            return True
+
+        # 检查全局封禁用户列表
+        ban_user_ids = [str(item) for item in features_config.get("ban_user_id", [])]
+        if user_id and user_id in ban_user_ids:
+            logger.debug(f"用户 {user_id} 在全局封禁列表中,事件被过滤")
+            return False
+
+        # 检查是否屏蔽其他QQ机器人(仅对消息事件生效)
+        if post_type == "message" and features_config.get("ban_qq_bot", False):
+            sender_info = raw.get("sender", {})
+            role = sender_info.get("role", "")
+            if role == "admin" or "bot" in str(sender_info).lower():
+                logger.debug(f"检测到机器人消息 {user_id},事件被过滤")
+                return False
+
+        # 获取消息类型(消息事件使用 message_type,通知事件根据 group_id 判断)
+        message_type = raw.get("message_type")
+        group_id = raw.get("group_id")
+
+        # 如果是通知事件,根据是否有 group_id 判断是群通知还是私聊通知
+        if post_type == "notice":
+            message_type = "group" if group_id else "private"
+
+        # 群聊/群通知过滤
+        if message_type == "group" and group_id:
+            group_id_str = str(group_id)
+            group_list_type = features_config.get("group_list_type", "blacklist")
+            group_list = [str(item) for item in features_config.get("group_list", [])]
+
+            if group_list_type == "blacklist":
+                if group_id_str in group_list:
+                    logger.debug(f"群聊 {group_id_str} 在黑名单中,事件被过滤")
+                    return False
+            else:  # whitelist
+                if group_id_str not in group_list:
+                    logger.debug(f"群聊 {group_id_str} 不在白名单中,事件被过滤")
+                    return False
+
+        # 私聊/私聊通知过滤
+        elif message_type == "private":
+            private_list_type = features_config.get("private_list_type", "blacklist")
+            private_list = [str(item) for item in features_config.get("private_list", [])]
+
+            if private_list_type == "blacklist":
+                if user_id in private_list:
+                    logger.debug(f"私聊用户 {user_id} 在黑名单中,事件被过滤")
+                    return False
+            else:  # whitelist
+                if user_id not in private_list:
+                    logger.debug(f"私聊用户 {user_id} 不在白名单中,事件被过滤")
+                    return False
+
+        # 通过所有过滤条件
+        return True
+
     async def on_adapter_loaded(self) -> None:
         """适配器加载时的初始化"""
         logger.info("Napcat 适配器正在启动...")
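For context, a rough sketch of the `features` configuration this filter reads, plus a few raw OneBot events and how they would be classified. The key names mirror those used in `_should_process_event` above; the concrete values are invented for illustration and are not defaults.

# Illustrative only: key names follow the code above, values are made up.
features = {
    "ban_user_id": ["10001"],         # globally banned users
    "ban_qq_bot": True,               # drop messages that look like they come from other bots
    "group_list_type": "whitelist",   # "blacklist" or "whitelist"
    "group_list": ["123456"],
    "private_list_type": "blacklist",
    "private_list": [],
}

# A group message from a whitelisted group passes the filter.
group_msg = {"post_type": "message", "message_type": "group",
             "group_id": 123456, "sender": {"user_id": 20002}}

# A notice from a group outside the whitelist is dropped, because notices
# are now filtered the same way as messages.
group_notice = {"post_type": "notice", "group_id": 999999, "user_id": 20002}

# Meta events (heartbeat / lifecycle) are never filtered: the method returns True early.
meta_event = {"post_type": "meta_event", "meta_event_type": "heartbeat"}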
@@ -161,6 +248,8 @@ class NapcatAdapter(BaseAdapter):
         - notice 事件 → 通知(戳一戳、表情回复等)
         - meta_event 事件 → 元事件(心跳、生命周期)
         - API 响应 → 存入响应池
+
+        注意:黑白名单等过滤机制在此方法最开始执行,确保所有类型的事件都能被过滤。
         """
         post_type = raw.get("post_type")

@@ -171,6 +260,11 @@ class NapcatAdapter(BaseAdapter):
             future = self._response_pool[echo]
             if not future.done():
                 future.set_result(raw)
+            return None
+
+        # 顶层过滤:黑白名单等过滤机制
+        if not self._should_process_event(raw):
+            return None

         try:
             # 消息事件
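For reference, the branch above separates two kinds of payloads: API responses, matched back to a pending future by their `echo` field, and ordinary events, which now pass through `_should_process_event` before any handler runs. A hedged sketch with made-up field values:

# Illustrative payloads; field values are invented.
api_response = {"status": "ok", "retcode": 0, "echo": "req-42", "data": {}}
# Resolved against the pending future stored under "req-42", then the method returns None.

poke_notice = {"post_type": "notice", "notice_type": "poke",
               "group_id": 123456, "user_id": 20002}
# Goes through _should_process_event first; if the group or user is filtered,
# from_platform_message returns None before any notice handling.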
@@ -39,79 +39,6 @@ class MessageHandler:
         """设置插件配置"""
         self.plugin_config = config

-    def _should_process_message(self, raw: Dict[str, Any]) -> bool:
-        """
-        检查消息是否应该被处理(黑白名单过滤)
-
-        Args:
-            raw: OneBot 原始消息数据
-
-        Returns:
-            bool: True表示应该处理,False表示应该过滤
-        """
-        if not self.plugin_config:
-            return True  # 如果没有配置,默认处理所有消息
-
-        features_config = self.plugin_config.get("features", {})
-
-        # 获取消息基本信息
-        message_type = raw.get("message_type")
-        sender_info = raw.get("sender", {})
-        user_id = str(sender_info.get("user_id", ""))
-
-        # 检查全局封禁用户列表
-        ban_user_ids = [str(item) for item in features_config.get("ban_user_id", [])]
-        if user_id in ban_user_ids:
-            logger.debug(f"用户 {user_id} 在全局封禁列表中,消息被过滤")
-            return False
-
-        # 检查是否屏蔽其他QQ机器人
-        if features_config.get("ban_qq_bot", False):
-            # 判断是否为机器人消息:通常通过sender中的role字段或其他标识
-            role = sender_info.get("role", "")
-            if role == "admin" or "bot" in str(sender_info).lower():
-                logger.debug(f"检测到机器人消息 {user_id},消息被过滤")
-                return False
-
-        # 群聊消息处理
-        if message_type == "group":
-            group_id = str(raw.get("group_id", ""))
-
-            # 获取群聊配置
-            group_list_type = features_config.get("group_list_type", "blacklist")
-            group_list = [str(item) for item in features_config.get("group_list", [])]
-
-            if group_list_type == "blacklist":
-                # 黑名单模式:如果在黑名单中就过滤
-                if group_id in group_list:
-                    logger.debug(f"群聊 {group_id} 在黑名单中,消息被过滤")
-                    return False
-            else:  # whitelist
-                # 白名单模式:如果不在白名单中就过滤
-                if group_id not in group_list:
-                    logger.debug(f"群聊 {group_id} 不在白名单中,消息被过滤")
-                    return False
-
-        # 私聊消息处理
-        elif message_type == "private":
-            # 获取私聊配置
-            private_list_type = features_config.get("private_list_type", "blacklist")
-            private_list = [str(item) for item in features_config.get("private_list", [])]
-
-            if private_list_type == "blacklist":
-                # 黑名单模式:如果在黑名单中就过滤
-                if user_id in private_list:
-                    logger.debug(f"私聊用户 {user_id} 在黑名单中,消息被过滤")
-                    return False
-            else:  # whitelist
-                # 白名单模式:如果不在白名单中就过滤
-                if user_id not in private_list:
-                    logger.debug(f"私聊用户 {user_id} 不在白名单中,消息被过滤")
-                    return False
-
-        # 通过所有过滤条件
-        return True
-
     async def handle_raw_message(self, raw: Dict[str, Any]):
         """
         处理原始消息并转换为 MessageEnvelope

@@ -120,18 +47,17 @@ class MessageHandler:
             raw: OneBot 原始消息数据

         Returns:
-            MessageEnvelope (dict) or None (if message is filtered)
+            MessageEnvelope (dict) or None
+
+        Note:
+            黑白名单过滤已移动到 NapcatAdapter.from_platform_message 顶层执行,
+            确保所有类型的事件(消息、通知等)都能被统一过滤。
         """

         message_type = raw.get("message_type")
         message_id = str(raw.get("message_id", ""))
         message_time = time.time()
-
-        # 黑白名单过滤
-        if not self._should_process_message(raw):
-            logger.debug(f"消息被黑白名单过滤丢弃: message_id={message_id}")
-            return None

         msg_builder = MessageBuilder()

         # 构造用户信息
@@ -1,5 +1,5 @@
 [inner]
-version = "7.9.5"
+version = "7.9.6"

 #----以下是给开发人员阅读的,如果你只是部署了MoFox-Bot,不需要阅读----
 #如果你想要修改配置文件,请递增version的值

@@ -12,30 +12,11 @@ version = "7.9.5"
 #----以上是给开发人员阅读的,如果你只是部署了MoFox-Bot,不需要阅读----

 [database]# 数据库配置
-database_type = "sqlite" # 数据库类型,支持 "sqlite"、"mysql" 或 "postgresql"
+database_type = "sqlite" # 数据库类型,支持 "sqlite" 或 "postgresql"

 # SQLite 配置(当 database_type = "sqlite" 时使用)
 sqlite_path = "data/MaiBot.db" # SQLite数据库文件路径

-# MySQL 配置(当 database_type = "mysql" 时使用)
-mysql_host = "localhost" # MySQL服务器地址
-mysql_port = 3306 # MySQL服务器端口
-mysql_database = "maibot" # MySQL数据库名
-mysql_user = "root" # MySQL用户名
-mysql_password = "" # MySQL密码
-mysql_charset = "utf8mb4" # MySQL字符集
-mysql_unix_socket = "" # MySQL Unix套接字路径(可选,用于本地连接,优先于host/port)
-
-# MySQL SSL 配置
-mysql_ssl_mode = "DISABLED" # SSL模式: DISABLED, PREFERRED, REQUIRED, VERIFY_CA, VERIFY_IDENTITY
-mysql_ssl_ca = "" # SSL CA证书路径
-mysql_ssl_cert = "" # SSL客户端证书路径
-mysql_ssl_key = "" # SSL客户端密钥路径
-
-# MySQL 高级配置
-mysql_autocommit = true # 自动提交事务
-mysql_sql_mode = "TRADITIONAL" # SQL模式
-
 # PostgreSQL 配置(当 database_type = "postgresql" 时使用)
 postgresql_host = "localhost" # PostgreSQL服务器地址
 postgresql_port = 5432 # PostgreSQL服务器端口
@@ -50,7 +31,7 @@ postgresql_ssl_ca = "" # SSL CA证书路径
 postgresql_ssl_cert = "" # SSL客户端证书路径
 postgresql_ssl_key = "" # SSL客户端密钥路径

-# 连接池配置(MySQL 和 PostgreSQL 有效)
+# 连接池配置(PostgreSQL 有效)
 connection_pool_size = 10 # 连接池大小
 connection_timeout = 10 # 连接超时时间(秒)

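With MySQL removed from the template, a minimal validation sketch for the `[database]` section could look like the following. The key names match the template above; the loader itself is hypothetical (not the project's actual config code) and assumes the template lives at config/bot_config.toml.

import tomllib  # Python 3.11+

# Hypothetical sketch: load the template and check the remaining database options.
with open("config/bot_config.toml", "rb") as f:
    cfg = tomllib.load(f)

db = cfg["database"]
db_type = db.get("database_type", "sqlite")

if db_type == "sqlite":
    target = db.get("sqlite_path", "data/MaiBot.db")
elif db_type == "postgresql":
    target = f'{db.get("postgresql_host", "localhost")}:{db.get("postgresql_port", 5432)}'
else:
    # "mysql" is no longer a valid value after this change.
    raise ValueError(f"Unsupported database_type: {db_type}")

print(f"database_type={db_type}, target={target}")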
@@ -131,7 +112,7 @@ safety_guidelines = [
 ]

 compress_personality = false # 是否压缩人格,压缩后会精简人格信息,节省token消耗并提高回复性能,但是会丢失一些信息,如果人设不长,可以关闭
-compress_identity = true # 是否压缩身份,压缩后会精简身份信息,节省token消耗并提高回复性能,但是会丢失一些信息,如果不长,可以关闭
+compress_identity = false # 是否压缩身份,压缩后会精简身份信息,节省token消耗并提高回复性能,但是会丢失一些信息,如果不长,可以关闭

 [expression]
 # 表达学习配置
@@ -283,7 +264,7 @@ path_expansion_path_score_weight = 0.50 # 路径分数在最终评分中的权
 path_expansion_importance_weight = 0.30 # 重要性在最终评分中的权重
 path_expansion_recency_weight = 0.20 # 时效性在最终评分中的权重

-# 🆕 路径扩展 - 记忆去重配置
+# 路径扩展 - 记忆去重配置
 enable_memory_deduplication = true # 启用检索结果去重(合并相似记忆)
 memory_deduplication_threshold = 0.85 # 记忆相似度阈值(0.85表示85%相似即合并)

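The two options above only say that retrieved memories at or above the similarity threshold are merged. As a rough, non-authoritative illustration of what threshold-based deduplication can look like (the project's actual retrieval pipeline and similarity measure may differ):

from typing import Callable

# Greedy threshold-based dedup sketch; the similarity function is left abstract.
def deduplicate(memories: list[str],
                similarity: Callable[[str, str], float],
                threshold: float = 0.85) -> list[str]:
    kept: list[str] = []
    for mem in memories:
        # Drop (merge away) a memory that is at least `threshold`-similar to one already kept.
        if any(similarity(mem, k) >= threshold for k in kept):
            continue
        kept.append(mem)
    return kept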