Replace json with orjson throughout
@@ -96,7 +96,7 @@ definition: Dict[str, Any] = {"name": cls.name, "description": cls.description,
 ```python
 from src.plugin_system import BaseTool
 import aiohttp
-import json
+import orjson

 class WeatherTool(BaseTool):
     """天气查询工具 - 获取指定城市的实时天气信息"""
@@ -31,10 +31,11 @@ dependencies = [
     "maim-message>=0.3.8",
     "matplotlib>=3.10.3",
     "networkx>=3.4.2",
+    "orjson>=3.10",
     "numpy>=2.2.6",
     "openai>=1.95.0",
     "opencv-python>=4.11.0.86",
-    "packaging>=25.0",
+    "packaging>=23.2",
     "pandas>=2.3.1",
     "peewee>=3.18.2",
     "pillow>=11.3.0",
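The new `orjson>=3.10` pin provides everything this migration relies on: `orjson.dumps()` returning UTF-8 `bytes`, `orjson.loads()` accepting `str` or `bytes`, and the `OPT_INDENT_2`/`OPT_SORT_KEYS` flags used below. A minimal round-trip sketch, not part of the commit itself:

```python
import orjson

payload = {"name": "MaiBot", "tags": ["测试", "orjson"]}

# dumps() returns bytes; decode when a str is required (e.g. DB Text columns)
raw: bytes = orjson.dumps(payload, option=orjson.OPT_INDENT_2)
text: str = raw.decode("utf-8")

# loads() accepts str, bytes, bytearray or memoryview
assert orjson.loads(raw) == orjson.loads(text) == payload
```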
@@ -1,4 +1,4 @@
-import json
+import orjson
 import os
 import signal
 from concurrent.futures import ThreadPoolExecutor, as_completed
@@ -74,8 +74,8 @@ def process_single_text(pg_hash, raw_data):
             # 存在对应的提取结果
             logger.info(f"找到缓存的提取结果:{pg_hash}")
             with open(temp_file_path, "r", encoding="utf-8") as f:
-                return json.load(f), None
-        except json.JSONDecodeError:
+                return orjson.loads(f.read()), None
+        except orjson.JSONDecodeError:
             # 如果JSON文件损坏,删除它并重新处理
             logger.warning(f"缓存文件损坏,重新处理:{pg_hash}")
             os.remove(temp_file_path)
@@ -97,7 +97,7 @@ def process_single_text(pg_hash, raw_data):
     with file_lock:
         try:
             with open(temp_file_path, "w", encoding="utf-8") as f:
-                json.dump(doc_item, f, ensure_ascii=False, indent=4)
+                f.write(orjson.dumps(doc_item, option=orjson.OPT_INDENT_2).decode('utf-8'))
         except Exception as e:
             logger.error(f"保存缓存文件失败:{pg_hash}, 错误:{e}")
             # 如果保存失败,确保不会留下损坏的文件
@@ -199,12 +199,12 @@ def main():  # sourcery skip: comprehension-to-generator, extract-method
         filename = now.strftime("%m-%d-%H-%S-openie.json")
         output_path = os.path.join(OPENIE_OUTPUT_DIR, filename)
         with open(output_path, "w", encoding="utf-8") as f:
-            json.dump(
-                openie_obj.to_dict() if hasattr(openie_obj, "to_dict") else openie_obj.__dict__,
-                f,
-                ensure_ascii=False,
-                indent=4,
-            )
+            f.write(
+                orjson.dumps(
+                    openie_obj.to_dict() if hasattr(openie_obj, "to_dict") else openie_obj.__dict__,
+                    option=orjson.OPT_INDENT_2
+                ).decode('utf-8')
+            )
         logger.info(f"信息提取结果已保存到: {output_path}")
     else:
         logger.warning("没有可保存的信息提取结果")
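orjson deliberately ships no `load`/`dump` file helpers, which is why every call site above becomes `orjson.loads(f.read())` on the read path and `f.write(orjson.dumps(...).decode('utf-8'))` on the write path. Note also that orjson only supports two-space indentation via `OPT_INDENT_2`, so the old `indent=4` output changes shape. A hedged sketch of the two idioms (file name hypothetical):

```python
import orjson

def save_json(path: str, obj) -> None:
    # orjson has no dump(); serialize to bytes first, then write
    with open(path, "wb") as f:  # binary mode avoids the decode entirely
        f.write(orjson.dumps(obj, option=orjson.OPT_INDENT_2))

def load_json(path: str):
    # orjson has no load(); read the whole file, then parse
    with open(path, "rb") as f:  # loads() accepts bytes directly
        return orjson.loads(f.read())

save_json("cache.json", {"pg_hash": "abc123"})
assert load_json("cache.json") == {"pg_hash": "abc123"}
```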
@@ -1,6 +1,6 @@
 import tkinter as tk
 from tkinter import ttk, messagebox, filedialog, colorchooser
-import json
+import orjson
 from pathlib import Path
 import threading
 import toml
@@ -199,7 +199,7 @@ class LogFormatter:
             parts.append(event)
         elif isinstance(event, dict):
             try:
-                parts.append(json.dumps(event, ensure_ascii=False, indent=None))
+                parts.append(orjson.dumps(event).decode('utf-8'))
             except (TypeError, ValueError):
                 parts.append(str(event))
         else:
@@ -212,7 +212,7 @@ class LogFormatter:
             if key not in ("timestamp", "level", "logger_name", "event"):
                 if isinstance(value, (dict, list)):
                     try:
-                        value_str = json.dumps(value, ensure_ascii=False, indent=None)
+                        value_str = orjson.dumps(value).decode('utf-8')
                     except (TypeError, ValueError):
                         value_str = str(value)
                 else:
@@ -428,10 +428,10 @@ class AsyncLogLoader:
                 # 处理这批数据
                 for line in lines:
                     try:
-                        log_entry = json.loads(line.strip())
+                        log_entry = orjson.loads(line.strip())
                         log_index.add_entry(line_count, log_entry)
                         line_count += 1
-                    except json.JSONDecodeError:
+                    except orjson.JSONDecodeError:
                         continue

                 # 更新进度
@@ -844,7 +844,7 @@ class LogViewer:
         if mapping_file.exists():
             try:
                 with open(mapping_file, "r", encoding="utf-8") as f:
-                    custom_mapping = json.load(f)
+                    custom_mapping = orjson.loads(f.read())
                 self.module_name_mapping.update(custom_mapping)
             except Exception as e:
                 print(f"加载模块映射失败: {e}")
@@ -855,7 +855,10 @@ class LogViewer:
         mapping_file.parent.mkdir(exist_ok=True)
         try:
             with open(mapping_file, "w", encoding="utf-8") as f:
-                json.dump(self.module_name_mapping, f, ensure_ascii=False, indent=2)
+                f.write(orjson.dumps(
+                    self.module_name_mapping,
+                    option=orjson.OPT_INDENT_2
+                ).decode('utf-8'))
         except Exception as e:
             print(f"保存模块映射失败: {e}")

@@ -1178,7 +1181,7 @@ class LogViewer:
                 for line in f:
                     if line.strip():
                         try:
-                            log_entry = json.loads(line)
+                            log_entry = orjson.loads(line)
                             self.log_index.add_entry(line_count, log_entry)
                             new_entries.append(log_entry)

@@ -1187,7 +1190,7 @@ class LogViewer:
                                 new_modules.add(logger_name)

                             line_count += 1
-                        except json.JSONDecodeError:
+                        except orjson.JSONDecodeError:
                             continue

                 # 如果发现了新模块,在主线程中更新模块集合
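The `except` clauses port over directly because `orjson.JSONDecodeError` is documented as a subclass of both `json.JSONDecodeError` and `ValueError`, so handlers written against either base class keep working. A small sketch:

```python
import json
import orjson

try:
    orjson.loads("{not valid json")
except orjson.JSONDecodeError as e:
    # also catchable as json.JSONDecodeError or plain ValueError
    assert isinstance(e, json.JSONDecodeError) and isinstance(e, ValueError)
```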
@@ -7,7 +7,7 @@
 import os
 import sys
 import argparse
-import json
+import orjson
 from pathlib import Path
 from src.common.logger import get_logger
 from src.plugin_system.utils.manifest_utils import (
@@ -51,7 +51,10 @@ def create_minimal_manifest(plugin_dir: str, plugin_name: str, description: str

     try:
         with open(manifest_path, "w", encoding="utf-8") as f:
-            json.dump(minimal_manifest, f, ensure_ascii=False, indent=2)
+            f.write(orjson.dumps(
+                minimal_manifest,
+                option=orjson.OPT_INDENT_2
+            ).decode('utf-8'))
         print(f"✅ 已创建最小化manifest文件: {manifest_path}")
         return True
     except Exception as e:
@@ -99,7 +102,10 @@ def create_complete_manifest(plugin_dir: str, plugin_name: str) -> bool:

     try:
         with open(manifest_path, "w", encoding="utf-8") as f:
-            json.dump(complete_manifest, f, ensure_ascii=False, indent=2)
+            f.write(orjson.dumps(
+                complete_manifest,
+                option=orjson.OPT_INDENT_2
+            ).decode('utf-8'))
         print(f"✅ 已创建完整manifest模板: {manifest_path}")
         print("💡 请根据实际情况修改manifest文件中的内容")
         return True
@@ -125,7 +131,7 @@ def validate_manifest_file(plugin_dir: str) -> bool:

     try:
         with open(manifest_path, "r", encoding="utf-8") as f:
-            manifest_data = json.load(f)
+            manifest_data = orjson.loads(f.read())

         validator = ManifestValidator()
         is_valid = validator.validate_manifest(manifest_data)
@@ -141,7 +147,7 @@ def validate_manifest_file(plugin_dir: str) -> bool:

         return is_valid

-    except json.JSONDecodeError as e:
+    except orjson.JSONDecodeError as e:
         print(f"❌ Manifest文件格式错误: {e}")
         return False
     except Exception as e:
@@ -1,5 +1,5 @@
 import os
-import json
+import orjson
 import sys # 新增系统模块导入

 # import time
@@ -369,7 +369,7 @@ class MongoToSQLiteMigrator:

         if field_type in ["CharField", "TextField"]:
             if isinstance(value, (list, dict)):
-                return json.dumps(value, ensure_ascii=False)
+                return orjson.dumps(value).decode('utf-8')
             return str(value) if value is not None else ""

         elif field_type == "IntegerField":
@@ -895,7 +895,7 @@ class MongoToSQLiteMigrator:

         try:
             with open(filepath, "w", encoding="utf-8") as f:
-                json.dump(error_report, f, ensure_ascii=False, indent=2)
+                f.write(orjson.dumps(error_report, option=orjson.OPT_INDENT_2).decode('utf-8'))
             logger.info(f"错误报告已导出到: {filepath}")
         except Exception as e:
             logger.error(f"导出错误报告失败: {e}")
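The two write paths in this file need care because `orjson.dumps()` accepts only `default=` and `option=` keyword arguments and returns `bytes`; there is no `ensure_ascii` or `indent=` keyword and no file argument. The calls above are therefore written against the documented signature:

```python
import orjson

error_report = {"failed": 3, "详情": ["字段类型不匹配"]}

# Not valid: orjson.dumps(error_report, f, ensure_ascii=False, indent=2)
# Valid: serialize to bytes, then write the decoded text yourself
with open("error_report.json", "w", encoding="utf-8") as f:  # hypothetical path
    f.write(orjson.dumps(error_report, option=orjson.OPT_INDENT_2).decode("utf-8"))
```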
@@ -1,6 +1,6 @@
 import time
 import random
-import json
+import orjson
 import os
 from datetime import datetime

@@ -558,7 +558,7 @@ class ExpressionLearnerManager:
                 continue
             try:
                 with open(expr_file, "r", encoding="utf-8") as f:
-                    expressions = json.load(f)
+                    expressions = orjson.loads(f.read())

                 if not isinstance(expressions, list):
                     logger.warning(f"表达方式文件格式错误,跳过: {expr_file}")
@@ -604,7 +604,7 @@ class ExpressionLearnerManager:

                 migrated_count += 1
                 logger.info(f"已迁移 {expr_file} 到数据库,包含 {len(expressions)} 个表达方式")
-            except json.JSONDecodeError as e:
+            except orjson.JSONDecodeError as e:
                 logger.error(f"JSON解析失败 {expr_file}: {e}")
             except Exception as e:
                 logger.error(f"迁移表达方式 {expr_file} 失败: {e}")
@@ -1,4 +1,4 @@
-import json
+import orjson
 import time
 import random
 import hashlib
@@ -304,7 +304,7 @@ class ExpressionSelector:
         # 5. 解析结果
         result = repair_json(content)
         if isinstance(result, str):
-            result = json.loads(result)
+            result = orjson.loads(result)

         if not isinstance(result, dict) or "selected_situations" not in result:
             logger.error("LLM返回格式错误")
@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-import json
+import orjson
 import os
 import math
 import asyncio
@@ -277,8 +277,11 @@ class EmbeddingStore:
             test_vectors[str(idx)] = self._get_embedding(s)

         with open(self.get_test_file_path(), "w", encoding="utf-8") as f:
-            json.dump(test_vectors, f, ensure_ascii=False, indent=2)
+            f.write(orjson.dumps(
+                test_vectors,
+                option=orjson.OPT_INDENT_2
+            ).decode('utf-8'))

         logger.info("测试字符串嵌入向量保存完成")

     def load_embedding_test_vectors(self):
@@ -287,7 +290,7 @@ class EmbeddingStore:
         if not os.path.exists(path):
             return None
         with open(path, "r", encoding="utf-8") as f:
-            return json.load(f)
+            return orjson.loads(f.read())

     def check_embedding_model_consistency(self):
         """校验当前模型与本地嵌入模型是否一致(使用多线程优化)"""
@@ -416,7 +419,9 @@ class EmbeddingStore:
         logger.info(f"{self.namespace}嵌入库的FaissIndex保存成功")
         logger.info(f"正在保存{self.namespace}嵌入库的idx2hash映射到文件{self.idx2hash_file_path}")
         with open(self.idx2hash_file_path, "w", encoding="utf-8") as f:
-            f.write(json.dumps(self.idx2hash, ensure_ascii=False, indent=4))
+            f.write(orjson.dumps(
+                self.idx2hash, option=orjson.OPT_INDENT_2
+            ).decode('utf-8'))
         logger.info(f"{self.namespace}嵌入库的idx2hash映射保存成功")

     def load_from_file(self) -> None:
@@ -457,7 +462,7 @@ class EmbeddingStore:
             logger.info(f"正在加载{self.namespace}嵌入库的idx2hash映射...")
             logger.debug(f"正在从文件{self.idx2hash_file_path}中加载{self.namespace}嵌入库的idx2hash映射")
             with open(self.idx2hash_file_path, "r") as f:
-                self.idx2hash = json.load(f)
+                self.idx2hash = orjson.loads(f.read())
             logger.info(f"{self.namespace}嵌入库的idx2hash映射加载成功")
         else:
             raise Exception(f"文件{self.idx2hash_file_path}不存在")
@@ -1,5 +1,5 @@
 import asyncio
-import json
+import orjson
 import time
 from typing import List, Union

@@ -20,7 +20,7 @@ def _extract_json_from_text(text: str):
     try:
         fixed_json = repair_json(text)
         if isinstance(fixed_json, str):
-            parsed_json = json.loads(fixed_json)
+            parsed_json = orjson.loads(fixed_json)
         else:
             parsed_json = fixed_json

@@ -95,9 +95,10 @@ def _entity_extract(llm_req: LLMRequest, paragraph: str) -> List[str]:
 def _rdf_triple_extract(llm_req: LLMRequest, paragraph: str, entities: list) -> List[List[str]]:
     """对段落进行实体提取,返回提取出的实体列表(JSON格式)"""
     rdf_extract_context = prompt_template.build_rdf_triple_extract_context(
-        paragraph, entities=json.dumps(entities, ensure_ascii=False)
+        paragraph, entities=orjson.dumps(entities).decode('utf-8')
     )

+
     # 使用 asyncio.run 来运行异步方法
     try:
         # 如果当前已有事件循环在运行,使用它
@@ -1,4 +1,4 @@
-import json
+import orjson
 import os
 import time
 from typing import Dict, List, Tuple
@@ -74,7 +74,7 @@ class KGManager:
         # 保存段落hash到文件
         with open(self.pg_hash_file_path, "w", encoding="utf-8") as f:
             data = {"stored_paragraph_hashes": list(self.stored_paragraph_hashes)}
-            f.write(json.dumps(data, ensure_ascii=False, indent=4))
+            f.write(orjson.dumps(data, option=orjson.OPT_INDENT_2).decode('utf-8'))

     def load_from_file(self):
         """从文件加载KG数据"""
@@ -88,7 +88,7 @@ class KGManager:

         # 加载段落hash
         with open(self.pg_hash_file_path, "r", encoding="utf-8") as f:
-            data = json.load(f)
+            data = orjson.loads(f.read())
         self.stored_paragraph_hashes = set(data["stored_paragraph_hashes"])

         # 加载实体计数
@@ -1,4 +1,4 @@
-import json
+import orjson
 import os
 import glob
 from typing import Any, Dict, List
@@ -113,7 +113,7 @@ class OpenIE:
         data_list = []
         for file in json_files:
             with open(file, "r", encoding="utf-8") as f:
-                data = json.load(f)
+                data = orjson.loads(f.read())
                 data_list.append(data)
         if not data_list:
             # print(f"111111111111111111111Root Path : \n{ROOT_PATH}")
@@ -1,4 +1,4 @@
-import json
+import orjson
 from json_repair import repair_json


@@ -45,7 +45,7 @@ def fix_broken_generated_json(json_str: str) -> str:
     - Iterating over the JSON string once to determine and fix unclosed braces or brackets.
     - Ensuring braces and brackets inside string literals are not considered.

-    If the original json_str string can be successfully loaded by json.loads(), will directly return it without any modification.
+    If the original json_str string can be successfully loaded by orjson.loads(), will directly return it without any modification.

     Args:
         json_str (str): The malformed JSON string to be fixed.
@@ -56,9 +56,9 @@ def fix_broken_generated_json(json_str: str) -> str:

     try:
         # Try to load the JSON to see if it is valid
-        json.loads(json_str)
+        orjson.loads(json_str)
         return json_str  # Return as-is if valid
-    except json.JSONDecodeError: ...
+    except orjson.JSONDecodeError: ...

     # Step 1: Remove trailing content after the last comma.
     last_comma_index = json_str.rfind(",")
@@ -80,7 +80,7 @@ def new_fix_broken_generated_json(json_str: str) -> str:
     """
     使用 json-repair 库修复格式错误的 JSON 字符串。

-    如果原始 json_str 字符串可以被 json.loads() 成功加载,则直接返回而不进行任何修改。
+    如果原始 json_str 字符串可以被 orjson.loads() 成功加载,则直接返回而不进行任何修改。

     参数:
         json_str (str): 需要修复的格式错误的 JSON 字符串。
@@ -90,8 +90,8 @@ def new_fix_broken_generated_json(json_str: str) -> str:
     """
     try:
         # 尝试加载 JSON 以查看其是否有效
-        json.loads(json_str)
+        orjson.loads(json_str)
         return json_str  # 如果有效则按原样返回
-    except json.JSONDecodeError:
+    except orjson.JSONDecodeError:
         # 如果无效,则尝试修复它
         return repair_json(json_str)
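The validate-then-repair pattern in this module stays the same under orjson: attempt a strict parse, and only fall back to `json_repair.repair_json()` when it raises. A usage sketch of that path, with an invented input string for illustration:

```python
import orjson
from json_repair import repair_json

truncated = '{"keywords": ["表达", "方式"'  # e.g. LLM output cut mid-array

try:
    result = orjson.loads(truncated)
except orjson.JSONDecodeError:
    # repair_json normally returns a str; the codebase defensively handles both
    fixed = repair_json(truncated)
    result = orjson.loads(fixed) if isinstance(fixed, str) else fixed

print(result)  # expected: {'keywords': ['表达', '方式']}
```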
@@ -4,7 +4,7 @@ import math
 import random
 import time
 import re
-import json
+import orjson
 import jieba
 import networkx as nx
 import numpy as np
@@ -912,7 +912,7 @@ class EntorhinalCortex:
             # 将memory_items转换为JSON字符串
             try:
                 memory_items = [str(item) for item in memory_items]
-                memory_items_json = json.dumps(memory_items, ensure_ascii=False)
+                memory_items_json = orjson.dumps(memory_items).decode("utf-8")
                 if not memory_items_json:
                     continue
             except Exception:
@@ -1082,7 +1082,7 @@ class EntorhinalCortex:

             try:
                 memory_items = [str(item) for item in memory_items]
-                if memory_items_json := json.dumps(memory_items, ensure_ascii=False):
+                if memory_items_json := orjson.dumps(memory_items).decode("utf-8"):
                     nodes_data.append(
                         {
                             "concept": concept,
@@ -1156,7 +1156,7 @@ class EntorhinalCortex:
         for node in nodes:
             concept = node.concept
             try:
-                memory_items = json.loads(node.memory_items)
+                memory_items = orjson.loads(node.memory_items)
                 if not isinstance(memory_items, list):
                     memory_items = [memory_items] if memory_items else []

@@ -356,10 +356,12 @@ def main():
     result = diagnostics.run_full_diagnosis()

     # 保存诊断结果
-    import json
+    import orjson
     with open("action_diagnosis_results.json", "w", encoding="utf-8") as f:
-        json.dump(result, f, indent=2, ensure_ascii=False, default=str)
+        f.write(orjson.dumps(
+            result, option=orjson.OPT_INDENT_2, default=str).decode('utf-8')
+        )

     logger.info("📄 诊断结果已保存到: action_diagnosis_results.json")

     # 根据诊断结果返回适当的退出代码
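The original call passed `default=str` so that values with no JSON representation degrade to strings instead of aborting the report, and orjson supports the same hook, so the migrated call keeps it. A sketch of the behavior (sample dict invented):

```python
import orjson

result = {"modules": {"planner", "memory"}, "ok": True}  # a set is not JSON-native

try:
    orjson.dumps(result)
except TypeError:
    pass  # orjson.JSONEncodeError is a subclass of TypeError

# default= is called for any unsupported type; str() turns the set into text
data = orjson.dumps(result, default=str, option=orjson.OPT_INDENT_2)
print(data.decode("utf-8"))
```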
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 import time
 import re
-import json
+import orjson
 import ast
 import traceback

@@ -69,7 +69,7 @@ class InstantMemory:
             return None
         try:
             repaired = repair_json(response)
-            result = json.loads(repaired)
+            result = orjson.loads(repaired)
             memory_text = result.get("memory_text", "")
             keywords = result.get("keywords", "")
             if isinstance(keywords, str):
@@ -142,7 +142,7 @@ class InstantMemory:
             return None
         try:
             repaired = repair_json(response)
-            result = json.loads(repaired)
+            result = orjson.loads(repaired)
             # 解析keywords
             keywords = result.get("keywords", "")
             if isinstance(keywords, str):
@@ -1,5 +1,5 @@
 import difflib
-import json
+import orjson

 from json_repair import repair_json
 from typing import List, Dict
@@ -30,7 +30,7 @@ def get_keywords_from_json(json_str) -> List:
         fixed_json = repair_json(json_str)

         # 如果repair_json返回的是字符串,需要解析为Python对象
-        result = json.loads(fixed_json) if isinstance(fixed_json, str) else fixed_json
+        result = orjson.loads(fixed_json) if isinstance(fixed_json, str) else fixed_json
         return result.get("keywords", [])
     except Exception as e:
         logger.error(f"解析关键词JSON失败: {e}")
@@ -1,6 +1,6 @@
 import re
 import traceback
-import json
+import orjson
 from typing import Union

 from src.common.database.sqlalchemy_models import Messages, Images
@@ -67,7 +67,7 @@ class MessageStorage:
             user_info_from_chat = chat_info_dict.get("user_info") or {}

             # 将priority_info字典序列化为JSON字符串,以便存储到数据库的Text字段
-            priority_info_json = json.dumps(priority_info) if priority_info else None
+            priority_info_json = orjson.dumps(priority_info).decode('utf-8') if priority_info else None

             # 获取数据库会话

@@ -1,4 +1,4 @@
-import json
+import orjson
 import time
 import traceback
 from typing import Dict, Any, Optional, Tuple
@@ -264,7 +264,7 @@ class ActionPlanner:

         if llm_content:
             try:
-                parsed_json = json.loads(repair_json(llm_content))
+                parsed_json = orjson.loads(repair_json(llm_content))

                 if isinstance(parsed_json, list):
                     if parsed_json:
@@ -2,7 +2,7 @@
 错别字生成器 - 基于拼音和字频的中文错别字生成工具
 """

-import json
+import orjson
 import math
 import os
 import random
@@ -52,7 +52,7 @@ class ChineseTypoGenerator:
         # 如果缓存文件存在,直接加载
         if cache_file.exists():
             with open(cache_file, "r", encoding="utf-8") as f:
-                return json.load(f)
+                return orjson.loads(f.read())

         # 使用内置的词频文件
         char_freq = defaultdict(int)
@@ -73,7 +73,9 @@ class ChineseTypoGenerator:

         # 保存到缓存文件
         with open(cache_file, "w", encoding="utf-8") as f:
-            json.dump(normalized_freq, f, ensure_ascii=False, indent=2)
+            f.write(orjson.dumps(
+                normalized_freq, option=orjson.OPT_INDENT_2).decode('utf-8')
+            )

         return normalized_freq

@@ -1,5 +1,5 @@
 import time
-import json
+import orjson
 import hashlib
 from pathlib import Path
 import numpy as np
@@ -106,7 +106,7 @@ class CacheManager:
             logger.warning(f"无法获取文件信息: {tool_file_path},错误: {e}")

         try:
-            sorted_args = json.dumps(function_args, sort_keys=True)
+            sorted_args = orjson.dumps(function_args, option=orjson.OPT_SORT_KEYS).decode('utf-8')
         except TypeError:
             sorted_args = repr(sorted(function_args.items()))
         return f"{tool_name}::{sorted_args}::{file_hash}"
@@ -163,7 +163,7 @@ class CacheManager:
             expires_at = cache_results["expires_at"]
             if time.time() < expires_at:
                 logger.info(f"命中L2键值缓存: {key}")
-                data = json.loads(cache_results["cache_value"])
+                data = orjson.loads(cache_results["cache_value"])

                 # 更新访问统计
                 await db_query(
@@ -209,7 +209,7 @@ class CacheManager:
         if semantic_cache_results:
             expires_at = semantic_cache_results["expires_at"]
             if time.time() < expires_at:
-                data = json.loads(semantic_cache_results["cache_value"])
+                data = orjson.loads(semantic_cache_results["cache_value"])
                 logger.debug(f"L2语义缓存返回的数据: {data}")

                 # 回填 L1
@@ -245,7 +245,7 @@ class CacheManager:
         # 写入 L2 (数据库)
         cache_data = {
             "cache_key": key,
-            "cache_value": json.dumps(data, ensure_ascii=False),
+            "cache_value": orjson.dumps(data).decode('utf-8'),
             "expires_at": expires_at,
             "tool_name": tool_name,
             "created_at": time.time(),
@@ -340,14 +340,14 @@ async def store_action_info(
         保存的记录数据或None
     """
     try:
-        import json
+        import orjson

         # 构建动作记录数据
         record_data = {
             "action_id": thinking_id or str(int(time.time() * 1000000)),
             "time": time.time(),
             "action_name": action_name,
-            "action_data": json.dumps(action_data or {}, ensure_ascii=False),
+            "action_data": orjson.dumps(action_data or {}).decode('utf-8'),
             "action_done": action_done,
             "action_build_into_prompt": action_build_into_prompt,
             "action_prompt_display": action_prompt_display,
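`OPT_SORT_KEYS` replaces `sort_keys=True` and is what keeps the cache keys deterministic: two dicts with the same entries in different insertion order serialize to identical bytes, so the derived key (and any hash of it) is stable. A sketch with hypothetical argument names:

```python
import hashlib
import orjson

a = {"city": "Beijing", "units": "metric"}
b = {"units": "metric", "city": "Beijing"}  # same args, different order

ka = orjson.dumps(a, option=orjson.OPT_SORT_KEYS)
kb = orjson.dumps(b, option=orjson.OPT_SORT_KEYS)
assert ka == kb  # deterministic bytes -> stable cache key

cache_key = f"weather_tool::{ka.decode('utf-8')}"
digest = hashlib.md5(ka).hexdigest()
```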
@@ -1,7 +1,7 @@
 # 使用基于时间戳的文件处理器,简单的轮转份数限制

 import logging
-import json
+import orjson
 import threading
 import time
 import structlog
@@ -696,7 +696,7 @@ class ModuleColoredConsoleRenderer:
         elif isinstance(event, dict):
             # 如果是字典,格式化为可读字符串
             try:
-                event_content = json.dumps(event, ensure_ascii=False, indent=None)
+                event_content = orjson.dumps(event).decode("utf-8")
             except (TypeError, ValueError):
                 event_content = str(event)
         else:
@@ -716,7 +716,7 @@ class ModuleColoredConsoleRenderer:
             # 确保值也转换为字符串
             if isinstance(value, (dict, list)):
                 try:
-                    value_str = json.dumps(value, ensure_ascii=False, indent=None)
+                    value_str = orjson.dumps(value).decode("utf-8")
                 except (TypeError, ValueError):
                     value_str = str(value)
             else:
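Dropping `ensure_ascii=False` loses nothing here: orjson has no such flag because it always emits UTF-8 without `\uXXXX` escapes, which is exactly what `ensure_ascii=False` produced. The only visible difference is whitespace, since orjson always writes compact separators. A sketch of the equivalence, worth verifying in your own environment:

```python
import json
import orjson

event = {"模块": "logger", "level": "info"}

old = json.dumps(event, ensure_ascii=False, indent=None)  # '{"模块": "logger", ...}'
new = orjson.dumps(event).decode("utf-8")                 # '{"模块":"logger",...}'

# Same characters, same UTF-8 handling; orjson just omits the spaces
assert json.loads(old) == orjson.loads(new)
```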
@@ -1,4 +1,4 @@
-import json
+import orjson
 import os
 import hashlib
 import time
@@ -109,7 +109,7 @@ class Individuality:
             "personality_side": personality_side,
             "compress_personality": global_config.personality.compress_personality,
         }
-        personality_str = json.dumps(personality_config, sort_keys=True)
+        personality_str = orjson.dumps(personality_config, option=orjson.OPT_SORT_KEYS).decode('utf-8')
         personality_hash = hashlib.md5(personality_str.encode("utf-8")).hexdigest()

         # 身份配置哈希
@@ -117,7 +117,7 @@ class Individuality:
             "identity": identity,
             "compress_identity": global_config.personality.compress_identity,
         }
-        identity_str = json.dumps(identity_config, sort_keys=True)
+        identity_str = orjson.dumps(identity_config, option=orjson.OPT_SORT_KEYS).decode('utf-8')
         identity_hash = hashlib.md5(identity_str.encode("utf-8")).hexdigest()

         return personality_hash, identity_hash
@@ -173,8 +173,8 @@ class Individuality:
         if os.path.exists(self.meta_info_file_path):
             try:
                 with open(self.meta_info_file_path, "r", encoding="utf-8") as f:
-                    return json.load(f)
-            except (json.JSONDecodeError, IOError) as e:
+                    return orjson.loads(f.read())
+            except (orjson.JSONDecodeError, IOError) as e:
                 logger.error(f"读取meta_info文件失败: {e}, 将创建新文件。")
                 return {}
         return {}
@@ -184,7 +184,9 @@ class Individuality:
         try:
             os.makedirs(os.path.dirname(self.meta_info_file_path), exist_ok=True)
             with open(self.meta_info_file_path, "w", encoding="utf-8") as f:
-                json.dump(meta_info, f, ensure_ascii=False, indent=2)
+                f.write(orjson.dumps(
+                    meta_info, option=orjson.OPT_INDENT_2).decode('utf-8')
+                )
         except IOError as e:
             logger.error(f"保存meta_info文件失败: {e}")

@@ -193,8 +195,8 @@ class Individuality:
         if os.path.exists(self.personality_data_file_path):
             try:
                 with open(self.personality_data_file_path, "r", encoding="utf-8") as f:
-                    return json.load(f)
-            except (json.JSONDecodeError, IOError) as e:
+                    return orjson.loads(f.read())
+            except (orjson.JSONDecodeError, IOError) as e:
                 logger.error(f"读取personality_data文件失败: {e}, 将创建新文件。")
                 return {}
         return {}
@@ -204,7 +206,9 @@ class Individuality:
         try:
             os.makedirs(os.path.dirname(self.personality_data_file_path), exist_ok=True)
             with open(self.personality_data_file_path, "w", encoding="utf-8") as f:
-                json.dump(personality_data, f, ensure_ascii=False, indent=2)
+                f.write(orjson.dumps(
+                    personality_data, option=orjson.OPT_INDENT_2).decode('utf-8')
+                )
             logger.debug(f"已保存personality数据到文件: {self.personality_data_file_path}")
         except IOError as e:
             logger.error(f"保存personality_data文件失败: {e}")
@@ -1,5 +1,5 @@
 from typing import Dict, List
-import json
+import orjson
 import os
 from dotenv import load_dotenv
 import sys
@@ -158,7 +158,7 @@ class PersonalityEvaluatorDirect:
             end_idx = ai_response.rfind("}") + 1
             if start_idx != -1 and end_idx != 0:
                 json_str = ai_response[start_idx:end_idx]
-                scores = json.loads(json_str)
+                scores = orjson.loads(json_str)
                 # 确保所有分数在1-6之间
                 return {k: max(1, min(6, float(v))) for k, v in scores.items()}
             else:
@@ -296,14 +296,18 @@ def main():

     # 保存简化的结果
     with open(save_path, "w", encoding="utf-8") as f:
-        json.dump(simplified_result, f, ensure_ascii=False, indent=4)
+        f.write(orjson.dumps(
+            simplified_result, option=orjson.OPT_INDENT_2).decode('utf-8')
+        )

     print(f"\n结果已保存到 {save_path}")

     # 同时保存完整结果到results目录
     os.makedirs("results", exist_ok=True)
     with open("results/personality_result.json", "w", encoding="utf-8") as f:
-        json.dump(result, f, ensure_ascii=False, indent=2)
+        f.write(orjson.dumps(
+            result, option=orjson.OPT_INDENT_2).decode('utf-8')
+        )


 if __name__ == "__main__":
@@ -1,4 +1,4 @@
-import json
+import orjson
 import os
 from typing import Any

@@ -14,7 +14,7 @@ def load_scenes() -> dict[str, Any]:
     json_path = os.path.join(current_dir, "template_scene.json")

     with open(json_path, "r", encoding="utf-8") as f:
-        return json.load(f)
+        return orjson.loads(f.read())


 PERSONALITY_SCENES = load_scenes()
@@ -1,5 +1,5 @@
 import asyncio
-import json
+import orjson
 import io
 from typing import Callable, Any, Coroutine, Optional
 import aiohttp
@@ -200,7 +200,7 @@ class AiohttpGeminiStreamParser:
                 if chunk_text == "[DONE]":
                     return

-                chunk_data = json.loads(chunk_text)
+                chunk_data = orjson.loads(chunk_text)

                 # 解析候选项
                 if "candidates" in chunk_data and chunk_data["candidates"]:
@@ -231,7 +231,7 @@ class AiohttpGeminiStreamParser:
                         usage.get("totalTokenCount", 0)
                     )

-            except json.JSONDecodeError as e:
+            except orjson.JSONDecodeError as e:
                 logger.warning(f"解析流式数据块失败: {e}, 数据: {chunk_text}")
             except Exception as e:
                 logger.error(f"处理流式数据块时出错: {e}")
@@ -1,6 +1,6 @@
 import asyncio
 import io
-import json
+import orjson
 import re
 import base64
 from collections.abc import Iterable
@@ -218,13 +218,13 @@ def _build_stream_api_resp(
         raw_arg_data = arguments_buffer.getvalue()
         arguments_buffer.close()
         try:
-            arguments = json.loads(repair_json(raw_arg_data))
+            arguments = orjson.loads(repair_json(raw_arg_data))
             if not isinstance(arguments, dict):
                 raise RespParseException(
                     None,
                     f"响应解析失败,工具调用参数无法解析为字典类型。工具调用参数原始响应:\n{raw_arg_data}",
                 )
-        except json.JSONDecodeError as e:
+        except orjson.JSONDecodeError as e:
             raise RespParseException(
                 None,
                 f"响应解析失败,无法解析工具调用参数。工具调用参数原始响应:{raw_arg_data}",
@@ -349,11 +349,11 @@ def _default_normal_response_parser(
         api_response.tool_calls = []
         for call in message_part.tool_calls:
             try:
-                arguments = json.loads(repair_json(call.function.arguments))
+                arguments = orjson.loads(repair_json(call.function.arguments))
                 if not isinstance(arguments, dict):
                     raise RespParseException(resp, "响应解析失败,工具调用参数无法解析为字典类型")
                 api_response.tool_calls.append(ToolCall(call.id, call.function.name, arguments))
-            except json.JSONDecodeError as e:
+            except orjson.JSONDecodeError as e:
                 raise RespParseException(resp, "响应解析失败,无法解析工具调用参数") from e

         # 提取Usage信息
@@ -1,4 +1,4 @@
-import json
+import orjson
 import time

 from json_repair import repair_json
@@ -165,7 +165,7 @@ class ChatAction:
         logger.info(f"response: {response}")
         logger.info(f"reasoning_content: {reasoning_content}")

-        if action_data := json.loads(repair_json(response)):
+        if action_data := orjson.loads(repair_json(response)):
             # 记录原动作,切换后进入冷却
             prev_body_action = self.body_action
             new_body_action = action_data.get("body_action", self.body_action)
@@ -228,7 +228,7 @@ class ChatAction:
         logger.info(f"response: {response}")
         logger.info(f"reasoning_content: {reasoning_content}")

-        if action_data := json.loads(repair_json(response)):
+        if action_data := orjson.loads(repair_json(response)):
             prev_body_action = self.body_action
             new_body_action = action_data.get("body_action", self.body_action)
             if new_body_action != prev_body_action and prev_body_action:
@@ -1,5 +1,5 @@
 import asyncio
-import json
+import orjson
 from collections import deque
 from datetime import datetime
 from typing import Dict, List, Optional
@@ -299,7 +299,7 @@ class ContextWebManager:
             ws.onmessage = function(event) {
                 console.log('收到WebSocket消息:', event.data);
                 try {
                     const data = JSON.parse(event.data);
                     updateMessages(data.contexts);
                 } catch (e) {
                     console.error('解析消息失败:', e, event.data);
@@ -573,7 +573,7 @@ class ContextWebManager:
         </div>

         <script>
-            console.log('调试信息:', {json.dumps(debug_info, ensure_ascii=False, indent=2)});
+            console.log('调试信息:', {orjson.dumps(debug_info, option=orjson.OPT_INDENT_2).decode('utf-8')});
             setTimeout(() => location.reload(), 5000); // 5秒自动刷新
         </script>
     </body>
@@ -619,7 +619,7 @@ class ContextWebManager:
         contexts_data = [msg.to_dict() for msg in all_context_msgs[-self.max_messages:]]

         data = {"contexts": contexts_data}
-        await ws.send_str(json.dumps(data, ensure_ascii=False))
+        await ws.send_str(orjson.dumps(data).decode('utf-8'))

     async def broadcast_contexts(self):
         """向所有WebSocket连接广播上下文更新"""
@@ -638,8 +638,8 @@ class ContextWebManager:
         contexts_data = [msg.to_dict() for msg in all_context_msgs[-self.max_messages:]]

         data = {"contexts": contexts_data}
-        message = json.dumps(data, ensure_ascii=False)
+        message = orjson.dumps(data).decode('utf-8')

         logger.info(f"广播 {len(contexts_data)} 条消息到 {len(self.websockets)} 个WebSocket连接")

         # 创建WebSocket列表的副本,避免在遍历时修改
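One boundary worth keeping straight in this file: orjson exists only on the Python side. The server can serialize WebSocket frames with `orjson.dumps(...).decode()`, but the JavaScript delivered to the browser has no orjson and must keep the built-in `JSON.parse`, which is why the embedded `ws.onmessage` handler above stays untouched. A sketch of the server half only (aiohttp-style, handler name hypothetical):

```python
import orjson
from aiohttp import web

async def push_contexts(ws: web.WebSocketResponse, contexts: list) -> None:
    # Serialize once with orjson; the browser parses it with plain JSON.parse
    await ws.send_str(orjson.dumps({"contexts": contexts}).decode("utf-8"))
```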
@@ -12,7 +12,7 @@ from src.config.config import global_config
 from src.common.message.api import get_global_api
 from src.chat.message_receive.storage import MessageStorage
 from .s4u_watching_manager import watching_manager
-import json
+import orjson
 from .s4u_mood_manager import mood_manager
 from src.person_info.relationship_builder_manager import relationship_builder_manager
 from src.mais4u.s4u_config import s4u_config
@@ -215,8 +215,8 @@ class S4UChat:
         priority_info = {}
         if isinstance(priority_info_raw, str):
             try:
-                priority_info = json.loads(priority_info_raw)
-            except json.JSONDecodeError:
+                priority_info = orjson.loads(priority_info_raw)
+            except orjson.JSONDecodeError:
                 logger.warning(f"Failed to parse priority_info JSON: {priority_info_raw}")
         elif isinstance(priority_info_raw, dict):
             priority_info = priority_info_raw
@@ -1,5 +1,5 @@
 import asyncio
-import json
+import orjson
 import time

 from src.chat.message_receive.message import MessageRecv
@@ -132,7 +132,7 @@ class ChatMood:
             elif "```" in response:
                 response = response.split("```")[1].split("```")[0]

-            data = json.loads(response)
+            data = orjson.loads(response)

             # Validate
             required_keys = {"joy", "anger", "sorrow", "fear"}
@@ -148,7 +148,7 @@ class ChatMood:

             return {key: data[key] for key in required_keys}

-        except json.JSONDecodeError:
+        except orjson.JSONDecodeError:
             logger.warning(f"Failed to parse numerical mood JSON: {response}")
             return None
         except Exception as e:
@@ -1,4 +1,4 @@
-import json
+import orjson
 import os

 from src.common.logger import get_logger
@@ -49,27 +49,27 @@ class LocalStoreManager:
             logger.debug(f"加载本地存储数据: {self.file_path}")
             try:
                 with open(self.file_path, "r", encoding="utf-8") as f:
-                    self.store = json.load(f)
+                    self.store = orjson.loads(f.read())
                 logger.info("全都记起来了!")
-            except json.JSONDecodeError:
+            except orjson.JSONDecodeError:
                 logger.warning("啊咧?记事本被弄脏了,正在重建记事本......")
                 self.store = {}
                 with open(self.file_path, "w", encoding="utf-8") as f:
-                    json.dump({}, f, ensure_ascii=False, indent=4)
+                    f.write(orjson.dumps({}, option=orjson.OPT_INDENT_2).decode('utf-8'))
                 logger.info("记事本重建成功!")
         else:
             # 不存在本地存储文件,创建新的目录和文件
             logger.warning("啊咧?记事本不存在,正在创建新的记事本......")
             os.makedirs(os.path.dirname(self.file_path), exist_ok=True)
             with open(self.file_path, "w", encoding="utf-8") as f:
-                json.dump({}, f, ensure_ascii=False, indent=4)
+                f.write(orjson.dumps({}, option=orjson.OPT_INDENT_2).decode('utf-8'))
             logger.info("记事本创建成功!")

     def save_local_store(self):
         """保存本地存储数据"""
         logger.debug(f"保存本地存储数据: {self.file_path}")
         with open(self.file_path, "w", encoding="utf-8") as f:
-            json.dump(self.store, f, ensure_ascii=False, indent=4)
+            f.write(orjson.dumps(self.store, option=orjson.OPT_INDENT_2).decode('utf-8'))


 local_storage = LocalStoreManager("data/local_store.json") # 全局单例化
@@ -1,4 +1,4 @@
-import json
+import orjson
 import asyncio
 import random
 from datetime import datetime, time, timedelta
@@ -151,7 +151,7 @@ class ScheduleManager:
             logger.info(f"从数据库加载今天的日程 ({today_str})。")

             try:
-                schedule_data = json.loads(str(schedule_record.schedule_data))
+                schedule_data = orjson.loads(str(schedule_record.schedule_data))

                 # 使用Pydantic验证日程数据
                 if self._validate_schedule_with_pydantic(schedule_data):
@@ -164,7 +164,7 @@ class ScheduleManager:
                 else:
                     logger.warning("数据库中的日程数据格式无效,将异步重新生成日程")
                     await self.generate_and_save_schedule()
-            except json.JSONDecodeError as e:
+            except orjson.JSONDecodeError as e:
                 logger.error(f"日程数据JSON解析失败: {e},将异步重新生成日程")
                 await self.generate_and_save_schedule()
         else:
@@ -282,7 +282,7 @@ class ScheduleManager:
         response, _ = await self.llm.generate_response_async(prompt)

         # 尝试解析和验证JSON(项目内置的反截断机制会自动处理截断问题)
-        schedule_data = json.loads(repair_json(response))
+        schedule_data = orjson.loads(repair_json(response))

         # 使用Pydantic验证生成的日程数据
         if self._validate_schedule_with_pydantic(schedule_data):
@@ -293,14 +293,14 @@ class ScheduleManager:
             if existing_schedule:
                 # 更新现有日程
                 session.query(Schedule).filter(Schedule.date == today_str).update({
-                    Schedule.schedule_data: json.dumps(schedule_data),
+                    Schedule.schedule_data: orjson.dumps(schedule_data).decode('utf-8'),
                     Schedule.updated_at: datetime.now()
                 })
             else:
                 # 创建新日程
                 new_schedule = Schedule(
                     date=today_str,
-                    schedule_data=json.dumps(schedule_data)
+                    schedule_data=orjson.dumps(schedule_data).decode('utf-8')
                 )
                 session.add(new_schedule)
             session.commit()
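The `orjson.loads(repair_json(...))` combination above recurs throughout the commit for parsing LLM output. A sketch of what it does, assuming `json_repair` is installed; the truncated response is fabricated for illustration:

```python
import orjson
from json_repair import repair_json

# A deliberately truncated LLM response (fabricated for illustration)
raw = '{"schedule": [{"time": "09:00", "activity": "写代码"'

repaired = repair_json(raw)    # by default returns a syntactically valid JSON string
data = orjson.loads(repaired)  # orjson.loads accepts str as well as bytes
print(data)  # expected: {'schedule': [{'time': '09:00', 'activity': '写代码'}]}
```

`repair_json` can also return parsed objects directly (its `return_objects` flag), which is why code later in this commit guards with an `isinstance(fixed_json, str)` check before calling `orjson.loads`.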
@@ -2,7 +2,7 @@ import copy
 import hashlib
 import datetime
 import asyncio
-import json
+import orjson
 import time

 from json_repair import repair_json
@@ -155,9 +155,9 @@ class PersonInfoManager:
         for key in JSON_SERIALIZED_FIELDS:
             if key in final_data:
                 if isinstance(final_data[key], (list, dict)):
-                    final_data[key] = json.dumps(final_data[key], ensure_ascii=False)
+                    final_data[key] = orjson.dumps(final_data[key]).decode('utf-8')
                 elif final_data[key] is None:  # Default for lists is [], store as "[]"
-                    final_data[key] = json.dumps([], ensure_ascii=False)
+                    final_data[key] = orjson.dumps([]).decode('utf-8')
                 # If it's already a string, assume it's valid JSON or a non-JSON string field

         def _db_create_sync(p_data: dict):
@@ -204,9 +204,9 @@ class PersonInfoManager:
         for key in JSON_SERIALIZED_FIELDS:
             if key in final_data:
                 if isinstance(final_data[key], (list, dict)):
-                    final_data[key] = json.dumps(final_data[key], ensure_ascii=False)
+                    final_data[key] = orjson.dumps(final_data[key]).decode('utf-8')
                 elif final_data[key] is None:  # Default for lists is [], store as "[]"
-                    final_data[key] = json.dumps([], ensure_ascii=False)
+                    final_data[key] = orjson.dumps([]).decode('utf-8')

         def _db_safe_create_sync(p_data: dict):
             with get_db_session() as session:
@@ -243,9 +243,9 @@ class PersonInfoManager:
         processed_value = value
         if field_name in JSON_SERIALIZED_FIELDS:
             if isinstance(value, (list, dict)):
-                processed_value = json.dumps(value, ensure_ascii=False, indent=None)
+                processed_value = orjson.dumps(value).decode('utf-8')
             elif value is None:  # Store None as "[]" for JSON list fields
-                processed_value = json.dumps([], ensure_ascii=False, indent=None)
+                processed_value = orjson.dumps([]).decode('utf-8')

         def _db_update_sync(p_id: str, f_name: str, val_to_set):

@@ -324,7 +324,7 @@ class PersonInfoManager:
         try:
             fixed_json = repair_json(text)
             if isinstance(fixed_json, str):
-                parsed_json = json.loads(fixed_json)
+                parsed_json = orjson.loads(fixed_json)
             else:
                 parsed_json = fixed_json

@@ -494,8 +494,8 @@ class PersonInfoManager:
         if f_name in JSON_SERIALIZED_FIELDS:
             if isinstance(val, str):
                 try:
-                    return json.loads(val)
-                except json.JSONDecodeError:
+                    return orjson.loads(val)
+                except orjson.JSONDecodeError:
                     logger.warning(f"字段 {f_name} for {p_id} 包含无效JSON: {val}. 返回默认值.")
                     return []  # Default for JSON fields on error
             elif val is None:  # Field exists in DB but is None
@@ -531,8 +531,8 @@ class PersonInfoManager:
         if field_name in JSON_SERIALIZED_FIELDS:
             if isinstance(val, str):
                 try:
-                    return json.loads(val)
-                except json.JSONDecodeError:
+                    return orjson.loads(val)
+                except orjson.JSONDecodeError:
                     logger.warning(f"字段 {field_name} for {person_id} 包含无效JSON: {val}. 返回默认值.")
                     return []
             elif val is None:
@@ -671,9 +671,9 @@ class PersonInfoManager:
         for key in JSON_SERIALIZED_FIELDS:
             if key in initial_data:
                 if isinstance(initial_data[key], (list, dict)):
-                    initial_data[key] = json.dumps(initial_data[key], ensure_ascii=False)
+                    initial_data[key] = orjson.dumps(initial_data[key]).decode('utf-8')
                 elif initial_data[key] is None:
-                    initial_data[key] = json.dumps([], ensure_ascii=False)
+                    initial_data[key] = orjson.dumps([]).decode('utf-8')

         # 获取 SQLAlchemy 模型的所有字段名
         model_fields = [column.name for column in PersonInfo.__table__.columns]
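A note on the `except` migrations in these hunks: `orjson.JSONDecodeError` is documented as a subclass of both `json.JSONDecodeError` and `ValueError`, so handlers keep working whichever spelling they use. A quick check, plus the string-column round-trip the `JSON_SERIALIZED_FIELDS` code relies on (the field value is illustrative):

```python
import json
import orjson

assert issubclass(orjson.JSONDecodeError, json.JSONDecodeError)
assert issubclass(orjson.JSONDecodeError, ValueError)

# Round-trip for a JSON-serialized TEXT column (illustrative value)
points = [["喜欢摄影", 0.8, "2024-01-01"]]
stored = orjson.dumps(points).decode("utf-8")  # str for the DB column
assert orjson.loads(stored) == points

try:
    orjson.loads("not json")
except orjson.JSONDecodeError as e:
    print(f"decode failed: {e}")
```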
@@ -1,6 +1,6 @@
 import time
 import traceback
-import json
+import orjson
 import random

 from typing import List, Dict, Any
@@ -187,7 +187,7 @@ class RelationshipFetcher:
         content, _ = await self.llm_model.generate_response_async(prompt=prompt)

         if content:
-            content_json = json.loads(repair_json(content))
+            content_json = orjson.loads(repair_json(content))

             # 检查是否返回了不需要查询的标志
             if "none" in content_json:
@@ -329,7 +329,7 @@ class RelationshipFetcher:
         content, _ = await self.instant_llm_model.generate_response_async(prompt=prompt)

         if content:
-            content_json = json.loads(repair_json(content))
+            content_json = orjson.loads(repair_json(content))
             if info_type in content_json:
                 info_content = content_json[info_type]
                 is_unknown = info_content == "none" or not info_content
@@ -5,7 +5,7 @@ import random
 from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config, model_config
 from src.chat.utils.chat_message_builder import build_readable_messages
-import json
+import orjson
 from json_repair import repair_json
 from datetime import datetime
 from difflib import SequenceMatcher
@@ -183,7 +183,7 @@ class RelationshipManager:
         # 解析JSON并转换为元组列表
         try:
             points = repair_json(points)
-            points_data = json.loads(points)
+            points_data = orjson.loads(points)

             # 只处理正确的格式,错误格式直接跳过
             if points_data == "none" or not points_data:
@@ -220,7 +220,7 @@ class RelationshipManager:
                 logger_str += f"({discarded_count} 条因重要性低被丢弃)\n"
             logger.info(logger_str)

-        except json.JSONDecodeError:
+        except orjson.JSONDecodeError:
             logger.error(f"解析points JSON失败: {points}")
             return
         except (KeyError, TypeError) as e:
@@ -230,15 +230,15 @@ class RelationshipManager:
         current_points = await person_info_manager.get_value(person_id, "points") or []
         if isinstance(current_points, str):
             try:
-                current_points = json.loads(current_points)
-            except json.JSONDecodeError:
+                current_points = orjson.loads(current_points)
+            except orjson.JSONDecodeError:
                 logger.error(f"解析points JSON失败: {current_points}")
                 current_points = []
         elif not isinstance(current_points, list):
             current_points = []
         current_points.extend(points_list)
         await person_info_manager.update_one_field(
-            person_id, "points", json.dumps(current_points, ensure_ascii=False, indent=None)
+            person_id, "points", orjson.dumps(current_points).decode('utf-8')
         )

         # 将新记录添加到现有记录中
@@ -286,7 +286,7 @@ class RelationshipManager:

         # 更新数据库
         await person_info_manager.update_one_field(
-            person_id, "points", json.dumps(current_points, ensure_ascii=False, indent=None)
+            person_id, "points", orjson.dumps(current_points).decode('utf-8')
         )

         await person_info_manager.update_one_field(person_id, "know_times", know_times + 1)
@@ -331,8 +331,8 @@ class RelationshipManager:
         forgotten_points = await person_info_manager.get_value(person_id, "forgotten_points") or []
         if isinstance(forgotten_points, str):
             try:
-                forgotten_points = json.loads(forgotten_points)
-            except json.JSONDecodeError:
+                forgotten_points = orjson.loads(forgotten_points)
+            except orjson.JSONDecodeError:
                 logger.error(f"解析forgotten_points JSON失败: {forgotten_points}")
                 forgotten_points = []
         elif not isinstance(forgotten_points, list):
@@ -461,7 +461,7 @@ class RelationshipManager:
         relation_value_response, _ = await self.relationship_llm.generate_response_async(
             prompt=relation_value_prompt
         )
-        relation_value_json = json.loads(repair_json(relation_value_response))
+        relation_value_json = orjson.loads(repair_json(relation_value_response))

         # 从LLM获取新生成的值
         new_attitude = int(relation_value_json.get("attitude", 50))
@@ -483,17 +483,17 @@ class RelationshipManager:

             await person_info_manager.update_one_field(person_id, "attitude", attitude)
             logger.info(f"更新了与 {person_name} 的态度: {attitude}")
-        except (json.JSONDecodeError, ValueError, TypeError) as e:
+        except (orjson.JSONDecodeError, ValueError, TypeError) as e:
             logger.error(f"解析relation_value JSON失败或值无效: {e}, 响应: {relation_value_response}")

         forgotten_points = []
         info_list = []
         await person_info_manager.update_one_field(
-            person_id, "info_list", json.dumps(info_list, ensure_ascii=False, indent=None)
+            person_id, "info_list", orjson.dumps(info_list).decode('utf-8')
         )

         await person_info_manager.update_one_field(
-            person_id, "forgotten_points", json.dumps(forgotten_points, ensure_ascii=False, indent=None)
+            person_id, "forgotten_points", orjson.dumps(forgotten_points).decode('utf-8')
         )

         return current_points
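The replacements above silently drop `ensure_ascii=False, indent=None`. That is safe: orjson always emits UTF-8 without escaping non-ASCII, and its default output is compact. The one observable difference is separator whitespace, sketched here:

```python
import json
import orjson

points = [["他喜欢摄影", 0.7]]

old = json.dumps(points, ensure_ascii=False, indent=None)
new = orjson.dumps(points).decode("utf-8")

print(old)  # [["他喜欢摄影", 0.7]]  <- json keeps a space after commas
print(new)  # [["他喜欢摄影",0.7]]   <- orjson output is fully compact
assert orjson.loads(old) == orjson.loads(new)
```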
@@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
 from typing import Dict, List, Any, Union
 import os
 import toml
-import json
+import orjson
 import shutil
 import datetime

@@ -144,14 +144,14 @@ class PluginBase(ABC):

         try:
             with open(manifest_path, "r", encoding="utf-8") as f:
-                self.manifest_data = json.load(f)
+                self.manifest_data = orjson.loads(f.read())

             logger.debug(f"{self.log_prefix} 成功加载manifest文件: {manifest_path}")

             # 验证manifest格式
             self._validate_manifest()

-        except json.JSONDecodeError as e:
+        except orjson.JSONDecodeError as e:
             error_msg = f"{self.log_prefix} manifest文件格式错误: {e}"
             logger.error(error_msg)
             raise ValueError(error_msg)  # noqa
@@ -453,7 +453,7 @@ class ManifestValidator:
     # try:
    #     manifest_path = os.path.join(plugin_dir, "_manifest.json")
     #     with open(manifest_path, "w", encoding="utf-8") as f:
-    #         json.dump(manifest_data, f, ensure_ascii=False, indent=2)
+    #         f.write(orjson.dumps(manifest_data, option=orjson.OPT_INDENT_2).decode('utf-8'))
     #     logger.info(f"Manifest文件已保存: {manifest_path}")
     #     return True
     # except Exception as e:
@@ -478,7 +478,7 @@ class ManifestValidator:

     # try:
     #     with open(manifest_path, "r", encoding="utf-8") as f:
-    #         manifest_data = json.load(f)
+    #         manifest_data = orjson.loads(f.read())

     #     validator = ManifestValidator()
     #     is_valid = validator.validate_manifest(manifest_data)
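orjson intentionally ships no `load()`/`dump()` file-object helpers, which is why `json.load(f)` becomes `orjson.loads(f.read())` in these hunks. Reading in binary mode is an equivalent, slightly leaner option, since `orjson.loads` accepts bytes directly and JSON files are UTF-8; a sketch (the path is illustrative):

```python
import orjson

# Equivalent to the text-mode read above, minus the decode step
with open("_manifest.json", "rb") as f:  # illustrative path
    manifest_data = orjson.loads(f.read())
```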
@@ -3,7 +3,7 @@
 Cookie服务模块
 负责从多种来源获取、缓存和管理QZone的Cookie。
 """
-import json
+import orjson
 from pathlib import Path
 from typing import Callable, Optional, Dict

@@ -33,7 +33,7 @@ class CookieService:
         cookie_file_path = self._get_cookie_file_path(qq_account)
         try:
             with open(cookie_file_path, "w", encoding="utf-8") as f:
-                json.dump(cookies, f)
+                f.write(orjson.dumps(cookies, option=orjson.OPT_INDENT_2).decode('utf-8'))
             logger.info(f"Cookie已成功缓存至: {cookie_file_path}")
         except IOError as e:
             logger.error(f"无法写入Cookie文件 {cookie_file_path}: {e}")
@@ -44,8 +44,8 @@ class CookieService:
         if cookie_file_path.exists():
             try:
                 with open(cookie_file_path, "r", encoding="utf-8") as f:
-                    return json.load(f)
-            except (IOError, json.JSONDecodeError) as e:
+                    return orjson.loads(f.read())
+            except (IOError, orjson.JSONDecodeError) as e:
                 logger.error(f"无法读取或解析Cookie文件 {cookie_file_path}: {e}")
                 return None
@@ -5,7 +5,7 @@ QQ空间服务模块
 """

 import asyncio
-import json
+import orjson
 import os
 import random
 import time
@@ -291,14 +291,14 @@ class QZoneService:
                 cookie_str = cookie_data["cookies"]
                 parsed_cookies = {k.strip(): v.strip() for k, v in (p.split('=', 1) for p in cookie_str.split('; ') if '=' in p)}
                 with open(cookie_file_path, "w", encoding="utf-8") as f:
-                    json.dump(parsed_cookies, f)
+                    f.write(orjson.dumps(parsed_cookies).decode('utf-8'))
                 logger.info(f"Cookie已更新并保存至: {cookie_file_path}")
                 return parsed_cookies

             # 如果HTTP获取失败,尝试读取本地文件
             if cookie_file_path.exists():
                 with open(cookie_file_path, "r", encoding="utf-8") as f:
-                    return json.load(f)
+                    return orjson.loads(f.read())
             return None
         except Exception as e:
             logger.error(f"更新或加载Cookie时发生异常: {e}")
@@ -422,7 +422,7 @@ class QZoneService:
             logger.warning("所有图片上传失败,将发布纯文本说说")

         res_text = await _request("POST", self.EMOTION_PUBLISH_URL, params={"g_tk": gtk}, data=post_data)
-        result = json.loads(res_text)
+        result = orjson.loads(res_text)
         tid = result.get("tid", "")

         if tid:
@@ -576,7 +576,7 @@ class QZoneService:
         }
         res_text = await _request("GET", self.LIST_URL, params=params)
         json_str = res_text[len("_preloadCallback(") : -2]
-        json_data = json.loads(json_str)
+        json_data = orjson.loads(json_str)

         if json_data.get("code") != 0:
             return []
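The cookie-refresh hunk above illustrates the main migration pitfall: orjson deliberately has no `dump`/`load`, so every file interaction goes through `dumps`/`loads` plus an explicit write or read. A pair of small wrappers could centralize that pattern; this is only a sketch, and the helper names are hypothetical, not from the repo:

```python
from pathlib import Path
from typing import Any

import orjson


def dump_json(path: Path, obj: Any, *, indent: bool = False) -> None:
    """Serialize obj with orjson and write it to path as UTF-8 bytes."""
    option = orjson.OPT_INDENT_2 if indent else 0
    path.write_bytes(orjson.dumps(obj, option=option))


def load_json(path: Path) -> Any:
    """Read path and parse it with orjson (the raw bytes are accepted directly)."""
    return orjson.loads(path.read_bytes())


# Usage (illustrative path and payload)
p = Path("cookies_example.json")
dump_json(p, {"uin": "12345", "skey": "abc"})
print(load_json(p))
```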
@@ -3,7 +3,7 @@
 历史记录工具模块
 提供用于获取QQ空间发送历史的功能。
 """
-import json
+import orjson
 import os
 from pathlib import Path
 from typing import Dict, Any, Optional, List
@@ -30,7 +30,7 @@ class _CookieManager:
         if os.path.exists(cookie_file):
             try:
                 with open(cookie_file, 'r', encoding='utf-8') as f:
-                    return json.load(f)
+                    return orjson.loads(f.read())
             except Exception as e:
                 logger.error(f"加载Cookie文件失败: {e}")
         return None
@@ -67,7 +67,7 @@ class _SimpleQZoneAPI:

             data = res.text
             json_str = data[len('_preloadCallback('):-2] if data.startswith('_preloadCallback(') else data
-            json_data = json.loads(json_str)
+            json_data = orjson.loads(json_str)

             return json_data.get("msglist", [])
         except Exception as e:
@@ -1,6 +1,6 @@
 # mmc/src/schedule/plan_generator.py

-import json
+import orjson
 from typing import List
 from pydantic import BaseModel, ValidationError
 from json_repair import repair_json
@@ -95,7 +95,7 @@ class PlanGenerator:

             # 修复并解析JSON
             repaired_json_str = repair_json(clean_content)
-            data = json.loads(repaired_json_str)
+            data = orjson.loads(repaired_json_str)

             # 使用Pydantic进行验证
             validated_response = PlanResponse.model_validate(data)
@@ -104,7 +104,7 @@ class PlanGenerator:
             logger.info(f"成功生成并验证了 {len(plans)} 个月度计划。")
             return plans

-        except json.JSONDecodeError:
+        except orjson.JSONDecodeError:
             logger.error(f"修复后仍然无法解析LLM返回的JSON: {llm_content}")
             return []
         except ValidationError as e:
@@ -3,7 +3,7 @@ MaiBot 端的消息切片处理模块
 用于接收和重组来自 Napcat-Adapter 的切片消息
 """

-import json
+import orjson
 import time
 import asyncio
 from typing import Dict, Any, Optional
@@ -131,8 +131,8 @@ class MessageReassembler:

         # 尝试反序列化重组后的消息
         try:
-            return json.loads(reassembled_message)
-        except json.JSONDecodeError as e:
+            return orjson.loads(reassembled_message)
+        except orjson.JSONDecodeError as e:
             logger.error(f"重组消息反序列化失败: {e}")
             return None

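As in the reassembler above, `orjson.loads` accepts `str`, `bytes`, `bytearray`, and `memoryview` alike, so reassembled payloads can be parsed without an explicit decode. A self-contained sketch with fabricated message slices:

```python
import orjson

chunks = [b'{"type": "text", ', b'"data": "hello"}']  # fabricated slices
reassembled = b"".join(chunks)

try:
    message = orjson.loads(reassembled)  # bytes input needs no .decode()
except orjson.JSONDecodeError as e:
    message = None
    print(f"重组消息反序列化失败: {e}")

print(message)  # {'type': 'text', 'data': 'hello'}
```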