1. Added the request framework.

2. Added a script for removing the virtual environment.
2026-01-12 14:20:44 +08:00
parent 45276a7787
commit bda8f13446
68 changed files with 9067 additions and 1 deletions


@@ -45,7 +45,13 @@
"Bash(./stop.sh:*)",
"Bash(./xrequest/Scripts/python.exe -m pip:*)",
"Bash(echo \"1. 访问: http://localhost:9999/web/pages/main.html\")",
"Bash(echo:*)"
"Bash(echo:*)",
"Bash(git rm --cached -r:*)",
"Bash(git reset HEAD X-Request/)",
"Bash(git add:*)",
"Bash(git rm:*)",
"Bash(git check-ignore:*)",
"Bash(./setup.sh:*)"
]
}
}

14
.gitignore vendored

@@ -174,3 +174,17 @@ cython_debug/
# PyPI configuration file
.pypirc
# Virtual environments (project-specific)
# Exclude virtual environments under the X-Request directory
X-Request/xrequest/
X-Request/venv/
X-Request/env/
X-Request/.venv/
X-Request/ENV/
X-Request/ENV.bak/
X-Request/venv.bak/
# X-Request project (exclude it entirely if it has its own .git repository)
# Remove this line if X-Request is a submodule
X-Request/

84
request/cleanup.sh Normal file

@@ -0,0 +1,84 @@
#!/usr/bin/env bash
set -euo pipefail
# 永远从脚本所在目录运行(避免在别的目录执行导致路径错误)
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
echo "🧹 X-Request 框架环境清理"
echo "=========================="
# 函数:加载 .env 文件中的变量
load_env_file() {
local env_file=".env"
if [ -f "$env_file" ]; then
while IFS='=' read -r key value; do
[[ "$key" =~ ^#.*$ ]] && continue
[[ -z "$key" ]] && continue
value=$(echo "$value" | sed 's/^["'\'']//' | sed 's/["'\'']$//')
export "$key=$value"
done < "$env_file"
fi
}
# 加载环境配置
load_env_file
# 检查虚拟环境是否存在
if [ ! -d "xrequest" ]; then
echo "⚠️ 虚拟环境不存在,无需清理"
exit 0
fi
echo "📋 检测到虚拟环境: xrequest"
# 询问用户确认
read -p "确定要删除虚拟环境吗?(y/N): " confirm
if [[ ! "$confirm" =~ ^[Yy]$ ]]; then
echo "❌ 操作已取消"
exit 0
fi
# 删除虚拟环境
echo "🗑️ 正在删除虚拟环境..."
rm -rf xrequest
if [ $? -eq 0 ]; then
echo "✅ 虚拟环境已删除"
else
echo "❌ 虚拟环境删除失败"
exit 1
fi
# 询问是否清理日志
if [ -d "${LOGS_DIR:-logs}" ]; then
echo ""
read -p "是否也要清理日志目录?(y/N): " clean_logs
if [[ "$clean_logs" =~ ^[Yy]$ ]]; then
echo "🗑️ 正在清理日志目录..."
rm -rf "${LOGS_DIR:-logs}"
if [ $? -eq 0 ]; then
echo "✅ 日志目录已清理"
else
echo "⚠️ 日志目录清理失败"
fi
fi
fi
# 询问是否清理 __pycache__ 和 .pyc 文件
echo ""
read -p "是否清理 Python 缓存文件 (__pycache__, *.pyc)(y/N): " clean_cache
if [[ "$clean_cache" =~ ^[Yy]$ ]]; then
echo "🗑️ 正在清理 Python 缓存..."
find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
find . -type f -name "*.pyc" -delete 2>/dev/null || true
echo "✅ Python 缓存已清理"
fi
echo ""
echo "🎉 清理完成!"
echo ""
echo "📝 如需重新设置环境,请运行:"
echo " ./setup.sh"
echo ""

39
request/fix_newlines.py Normal file

@@ -0,0 +1,39 @@
#!/usr/bin/env python3
"""
修复文件的换行符,将 Windows 风格的 \r\n 转换为 Unix 风格的 \n
用法python fix_newlines.py <file1> <file2> ...
"""
import sys
import os
def fix_newlines(file_path):
"""修复文件的换行符"""
print(f"修复文件: {file_path}")
try:
# 读取文件内容
with open(file_path, 'r', encoding='utf-8') as f:
content = f.read()
# 写入文件,使用 Unix 风格的换行符
with open(file_path, 'w', encoding='utf-8', newline='\n') as f:
f.write(content)
print(f"✅ 修复成功: {file_path}")
return True
except Exception as e:
print(f"❌ 修复失败: {file_path}, 错误: {e}")
return False
if __name__ == "__main__":
if len(sys.argv) < 2:
print("用法: python fix_newlines.py <file1> <file2> ...")
sys.exit(1)
# 修复所有指定的文件
for file_path in sys.argv[1:]:
if os.path.exists(file_path):
fix_newlines(file_path)
else:
print(f"❌ 文件不存在: {file_path}")


@@ -0,0 +1 @@
3335

56
request/main.py Normal file

@@ -0,0 +1,56 @@
#!/usr/bin/env python3
"""
X-Request 高性能FastAPI框架
主程序入口
"""
import uvicorn
import os
import sys
from pathlib import Path
# 将src目录添加到Python路径
sys.path.insert(0, str(Path(__file__).parent / "src"))
from src.core import get_app
from src.config import settings
from src.utils import logger_manager
# 配置日志系统
logger_manager.configure(
log_level=settings.log_level,
log_format=settings.log_format,
log_file=settings.log_file,
log_to_console=settings.log_to_console
)
# Create the FastAPI app at module level so that uvicorn can import it
app = get_app()
def main():
"""主函数"""
# 打印启动信息
print(f"[X-Request] 高性能FastAPI框架正在启动...")
print(f"[地址] 监听地址: {settings.host}:{settings.port}")
print(f"[文档] API文档: http://{settings.host}:{settings.port}/docs")
print(f"[健康] 健康检查: http://{settings.host}:{settings.port}/health")
print(f"[信息] 应用信息: http://{settings.host}:{settings.port}/info")
print(f"\n[成功] 应用已成功启动!按 Ctrl+C 停止服务器\n")
# 启动服务器
uvicorn.run(
app,
host=settings.host,
port=settings.port,
workers=1, # 简化配置,使用单进程
reload=False, # 禁用热重载
log_level="info", # 显示信息级别的日志
access_log=False, # 关闭访问日志
use_colors=True, # 启用颜色输出
)
if __name__ == "__main__":
main()

17
request/requirements.txt Normal file

@@ -0,0 +1,17 @@
# Core dependencies
fastapi==0.104.1
uvicorn[standard]==0.24.0
pydantic==2.5.0
pydantic-settings==2.1.0
psutil==5.9.8
# Logging
structlog==23.2.0
colorama==0.4.6
python-json-logger==2.0.7
# Async file I/O
aiofiles==23.2.1
# Form/file uploads (required by FastAPI UploadFile/Form)
python-multipart==0.0.9

300
request/setup.sh Normal file

@@ -0,0 +1,300 @@
#!/usr/bin/env bash
set -euo pipefail
# 永远从脚本所在目录运行(避免在别的目录执行导致 requirements/.env/venv 路径错误)
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
# 函数:加载 .env 文件中的变量
load_env_file() {
local env_file=".env"
if [ -f "$env_file" ]; then
while IFS='=' read -r key value; do
# 跳过注释和空行
[[ "$key" =~ ^#.*$ ]] && continue
[[ -z "$key" ]] && continue
# 移除值两端的引号(如果有)
value=$(echo "$value" | sed 's/^["'\'']//' | sed 's/["'\'']$//')
# 导出变量
export "$key=$value"
done < "$env_file"
fi
}
echo "🔧 X-Request 框架环境设置"
echo "=========================="
# 检查当前操作系统
# 首先检查是否是Windows环境
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
OS_TYPE="Windows"
PYTHON_CMD="python"
elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
OS_TYPE="Linux"
PYTHON_CMD="python3"
elif [[ "$OSTYPE" == "darwin"* ]]; then
OS_TYPE="macOS"
PYTHON_CMD="python3"
else
# 使用uname作为后备检测
OS_TYPE="$(uname -s)"
if [[ "$OS_TYPE" == *"Windows"* || "$OS_TYPE" == "MSYS"* || "$OS_TYPE" == "MINGW"* ]]; then
OS_TYPE="Windows"
PYTHON_CMD="python"
else
PYTHON_CMD="python3"
fi
fi
# 额外检查是否在Windows PowerShell或CMD中运行
if [[ "$SHELL" == *"powershell"* || "$SHELL" == *"cmd.exe"* ]]; then
OS_TYPE="Windows"
PYTHON_CMD="python"
fi
# 检查是否存在python.exe文件(Windows特有)
if [[ -f "$(which python.exe 2>/dev/null)" ]]; then
OS_TYPE="Windows"
PYTHON_CMD="python.exe"
fi
echo "📋 检测到操作系统: $OS_TYPE"
echo "📋 使用Python命令: $PYTHON_CMD"
# 检查Python版本
echo "📋 检查Python版本..."
python_version=$($PYTHON_CMD --version 2>&1 | grep -Po '(?<=Python )\d+\.\d+')
if [ -z "$python_version" ]; then
python_version=$($PYTHON_CMD --version 2>&1 | awk '{print $2}' | cut -d'.' -f1,2)
fi
echo "✅ 发现Python: $python_version"
# 简单的版本比较(不需要bc命令)
major_version=$(echo $python_version | cut -d'.' -f1)
minor_version=$(echo $python_version | cut -d'.' -f2)
if [ "$major_version" -lt 3 ] || ([ "$major_version" -eq 3 ] && [ "$minor_version" -lt 8 ]); then
echo "❌ 需要 Python 3.8 或更高版本,当前版本: $python_version"
exit 1
fi
echo "✅ Python版本检查通过"
# 检查虚拟环境是否存在且完整
VENV_COMPLETE=false
if [ -d "xrequest" ]; then
# 检查激活脚本是否存在
if [ -f "xrequest/Scripts/activate" ] || [ -f "xrequest/bin/activate" ]; then
echo "✅ 虚拟环境已存在且完整"
VENV_COMPLETE=true
else
echo "⚠️ 虚拟环境存在但不完整,删除并重新创建..."
rm -rf xrequest
fi
fi
# 如果虚拟环境不存在或不完整,创建新的虚拟环境
if [ "$VENV_COMPLETE" = false ]; then
echo "📦 创建虚拟环境..."
# 根据操作系统类型选择不同的虚拟环境创建方式
if [ "$OS_TYPE" = "Windows" ]; then
# 在Windows上使用python -m venv
$PYTHON_CMD -m venv xrequest
if [ $? -eq 0 ]; then
# 检查虚拟环境是否创建成功
if [ -f "xrequest/Scripts/activate" ]; then
echo "✅ 虚拟环境创建完成"
else
echo "⚠️ 虚拟环境创建可能失败尝试使用pip创建..."
# 如果venv创建失败尝试使用pip安装virtualenv并创建虚拟环境
$PYTHON_CMD -m pip install --user virtualenv
$PYTHON_CMD -m virtualenv xrequest
if [ $? -eq 0 ]; then
if [ -f "xrequest/Scripts/activate" ]; then
echo "✅ 虚拟环境创建完成使用virtualenv"
else
echo "❌ 虚拟环境创建失败,请手动创建虚拟环境"
echo " 手动创建方法:"
echo " 1. 安装virtualenvpip install virtualenv"
echo " 2. 创建虚拟环境virtualenv xrequest"
echo " 3. 激活虚拟环境xrequest/Scripts/activate"
echo " 4. 安装依赖pip install -r requirements.txt"
fi
else
echo "❌ 虚拟环境创建失败,请手动创建虚拟环境"
echo " 手动创建方法:"
echo " 1. 安装virtualenvpip install virtualenv"
echo " 2. 创建虚拟环境virtualenv xrequest"
echo " 3. 激活虚拟环境xrequest/Scripts/activate"
echo " 4. 安装依赖pip install -r requirements.txt"
fi
fi
else
echo "❌ 虚拟环境创建失败,请手动创建虚拟环境"
echo " 手动创建方法:"
echo " 1. 安装virtualenvpip install virtualenv"
echo " 2. 创建虚拟环境virtualenv xrequest"
echo " 3. 激活虚拟环境xrequest/Scripts/activate"
echo " 4. 安装依赖pip install -r requirements.txt"
fi
else
# 在Linux/Mac上使用python3 -m venv
$PYTHON_CMD -m venv xrequest
if [ $? -eq 0 ]; then
# 检查虚拟环境是否创建成功
if [ -f "xrequest/bin/activate" ]; then
echo "✅ 虚拟环境创建完成"
else
echo "⚠️ 虚拟环境创建可能失败尝试使用pip创建..."
# 如果venv创建失败尝试使用pip安装virtualenv并创建虚拟环境
$PYTHON_CMD -m pip install --user virtualenv
$PYTHON_CMD -m virtualenv xrequest
if [ $? -eq 0 ]; then
if [ -f "xrequest/bin/activate" ]; then
echo "✅ 虚拟环境创建完成使用virtualenv"
else
echo "❌ 虚拟环境创建失败,请手动创建虚拟环境"
echo " 手动创建方法:"
echo " 1. 安装virtualenvpip install virtualenv"
echo " 2. 创建虚拟环境virtualenv xrequest"
echo " 3. 激活虚拟环境source xrequest/bin/activate"
echo " 4. 安装依赖pip install -r requirements.txt"
fi
else
echo "❌ 虚拟环境创建失败,请手动创建虚拟环境"
echo " 手动创建方法:"
echo " 1. 安装virtualenvpip install virtualenv"
echo " 2. 创建虚拟环境virtualenv xrequest"
echo " 3. 激活虚拟环境source xrequest/bin/activate"
echo " 4. 安装依赖pip install -r requirements.txt"
fi
fi
else
echo "❌ 虚拟环境创建失败,请手动创建虚拟环境"
echo " 在Linux上您可能需要先安装python3-venv包"
echo " sudo apt install python3.12-venv" # 针对Ubuntu/Debian系统
echo " 或者:"
echo " 1. 安装virtualenvpip install virtualenv"
echo " 2. 创建虚拟环境virtualenv xrequest"
echo " 3. 激活虚拟环境source xrequest/bin/activate"
echo " 4. 安装依赖pip install -r requirements.txt"
fi
fi
fi
# 确定激活脚本路径
# 检查是否存在 Windows 风格的激活脚本
if [ -f "xrequest/Scripts/activate" ]; then
ACTIVATE_SCRIPT="xrequest/Scripts/activate"
echo "📋 使用 Windows 风格的激活脚本: $ACTIVATE_SCRIPT"
elif [ -f "xrequest/bin/activate" ]; then
ACTIVATE_SCRIPT="xrequest/bin/activate"
echo "📋 使用 Linux/Mac 风格的激活脚本: $ACTIVATE_SCRIPT"
else
ACTIVATE_SCRIPT=""
echo "⚠️ 未找到激活脚本"
fi
# 安装依赖(强制使用虚拟环境里的 python -m pip,不要因为 curl 检测失败而跳过安装)
echo "📦 安装依赖包..."
VENV_PY=""
if [ -f "xrequest/Scripts/python.exe" ]; then
VENV_PY="xrequest/Scripts/python.exe"
elif [ -f "xrequest/bin/python" ]; then
VENV_PY="xrequest/bin/python"
fi
if [ -z "$VENV_PY" ]; then
echo "❌ 未找到虚拟环境 Python无法安装依赖。请先确保虚拟环境创建成功。"
exit 1
fi
echo "📋 使用虚拟环境 Python: $VENV_PY"
echo " 📦 升级 pip/setuptools/wheel..."
$VENV_PY -m pip install --upgrade pip setuptools wheel
echo " 📚 使用 requirements.txt 安装所有依赖..."
$VENV_PY -m pip install -r requirements.txt
echo " 🔎 校验关键依赖..."
$VENV_PY -c "import uvicorn, fastapi; print('OK: uvicorn/fastapi installed')"
echo "✅ 依赖安装完成!"
# 加载 .env 文件中的变量
echo "📄 加载环境配置..."
load_env_file
# 创建日志目录(使用 .env 中的 LOGS_DIR 配置)
echo "📁 创建日志目录..."
mkdir -p "${LOGS_DIR:-logs}"
# 检查是否存在.env文件,如果不存在则创建
if [ ! -f ".env" ]; then
echo "📄 创建环境配置文件..."
if [ -f ".env.example" ]; then
cp .env.example .env
echo "✅ 已从 .env.example 创建 .env 文件,可根据需要修改配置"
else
# 如果没有示例文件,创建一个基本的.env文件
cat > .env << EOF
# 应用配置
APP_NAME="X-Request API Framework"
APP_VERSION="1.0.0"
DEBUG=false
# 服务器配置
HOST="0.0.0.0"
PORT=1111
WORKERS=1
# 日志配置
LOG_LEVEL="INFO"
LOG_FILE="logs/app.log"
LOG_FORMAT="json"
LOG_TO_CONSOLE=false
# 高级日志配置
ADVANCED_LOGGING=true
LOGS_DIR="logs"
MAX_LOG_DAYS=30
ENABLE_LOG_CLEANUP=true
ROUTE_BASED_LOGGING=true
# 性能配置
MAX_REQUESTS=1000
MAX_CONNECTIONS=1000
REQUEST_TIMEOUT=30
# CORS配置
CORS_ORIGINS=["*"]
CORS_METHODS=["*"]
CORS_HEADERS=["*"]
EOF
echo "✅ 已创建基本的 .env 文件,可根据需要修改配置"
fi
fi
echo ""
echo "🎉 环境设置完成!"
echo ""
echo "🚀 启动方法:"
echo " ./start.sh"
echo " 或者:"
echo " source xrequest/bin/activate && python main.py"
echo ""
echo "📚 API文档地址:"
echo " http://localhost:${PORT:-3000}/docs"
echo "🏥 健康检查:"
echo " http://localhost:${PORT:-3000}/health"
echo ""
echo ""

7
request/src/__init__.py Normal file
View File

@@ -0,0 +1,7 @@
"""
X-Request 高性能FastAPI框架
"""
__version__ = "1.0.0"
__author__ = "X-Request Team"
__description__ = "高性能、高并发的请求框架,具有全面的日志系统"


@@ -0,0 +1,11 @@
"""
API 模块
目录结构:
- internal/: 框架核心代码 (base, discovery, monitoring)
- modules/: 用户业务代码 (hello, user, ...)
"""
from .internal import BaseAPI, route, auto_register_routes, get_registered_modules_info
__all__ = ["BaseAPI", "route", "auto_register_routes", "get_registered_modules_info"]


@@ -0,0 +1,13 @@
"""
框架内部模块
此目录包含框架的核心组件,不建议用户修改:
- base.py: API基类
- discovery.py: 自动发现和注册系统
- monitoring.py: 系统监控API
"""
from .base import BaseAPI, route, get, post, put, delete
from .discovery import auto_register_routes, get_registered_modules_info
__all__ = ['BaseAPI', 'route', 'get', 'post', 'put', 'delete', 'auto_register_routes', 'get_registered_modules_info']


@@ -0,0 +1,271 @@
"""
BaseAPI 基类 - 提供自动路由注册和通用功能
"""
from fastapi import APIRouter, HTTPException, BackgroundTasks
from pydantic import BaseModel
from typing import Optional, Any, Dict, List
from abc import ABC, abstractmethod
import inspect
import time
from pathlib import Path
# 修复导入错误
from src.utils.logger import log_info, log_warning, log_error, get_logger
from src.utils.exceptions import (
ValidationException, NotFoundException, BusinessException
)
class BaseAPI(ABC):
"""
API 基类
使用方法:
1. 继承 BaseAPI
2. 定义路由方法 (自动装饰)
3. 文件名自动成为路由前缀
4. 自动获得日志、错误处理等功能
"""
def __init__(self):
"""初始化基类"""
# 获取模块名作为路由前缀
self.module_name = self.__class__.__module__.split('.')[-1]
self.router_prefix = f"/{self.module_name}"
# 创建路由器
self.router = APIRouter(
prefix=self.router_prefix,
tags=[self.module_name.capitalize()]
)
# 获取日志器
self.logger = get_logger(self.__class__.__module__)
# 自动注册路由
self._auto_register_routes()
# 记录初始化
self.logger.info(
f"API模块初始化完成",
module=self.module_name,
prefix=self.router_prefix,
routes=len(self.router.routes)
)
def _auto_register_routes(self):
"""自动注册路由方法"""
# 获取所有公共方法
methods = inspect.getmembers(self, predicate=inspect.ismethod)
for name, method in methods:
# 跳过私有方法和特殊方法
if name.startswith('_'):
continue
# 跳过基类方法
if method.__self__.__class__ == BaseAPI:
continue
# 检查方法是否有路径装饰器HTTP路由
if hasattr(method, '__route_config__'):
route_config = method.__route_config__
self._register_route(method, route_config)
# 检查方法是否有WebSocket装饰器
elif hasattr(method, '__websocket_config__'):
websocket_config = method.__websocket_config__
self._register_websocket(method, websocket_config)
def _register_route(self, method, route_config: Dict[str, Any]):
"""注册单个路由"""
http_method = route_config['method']
path = route_config['path']
response_model = route_config.get('response_model')
summary = route_config.get('summary', method.__name__)
tags = route_config.get('tags', [self.module_name])
# 创建路由处理器(包装原始方法以添加日志)
async def wrapped_handler(*args, **kwargs):
return await self._handle_request(method, *args, **kwargs)
# 复制原始方法的签名和文档
wrapped_handler.__signature__ = inspect.signature(method)
wrapped_handler.__doc__ = method.__doc__
wrapped_handler.__name__ = method.__name__
# 注册路由
self.router.add_api_route(
path=path,
endpoint=wrapped_handler,
methods=[http_method],
response_model=response_model,
summary=summary,
tags=tags
)
self.logger.info(
f"路由注册成功",
method=http_method,
path=f"{self.router_prefix}{path}",
handler=method.__name__
)
async def _handle_request(self, method, *args, **kwargs):
"""请求处理器包装器"""
start_time = time.time()
try:
# 记录请求开始
self.logger.info(
f"请求开始",
method=method.__name__,
args=args,
kwargs={k: v for k, v in kwargs.items() if k != 'request'}
)
# 执行原始方法
result = await method(*args, **kwargs)
# 记录成功
execution_time = time.time() - start_time
self.logger.info(
f"请求成功",
method=method.__name__,
execution_time=f"{execution_time:.4f}s"
)
return result
except Exception as e:
# 记录错误
execution_time = time.time() - start_time
self.logger.error(
f"请求失败",
method=method.__name__,
error=str(e),
error_type=type(e).__name__,
execution_time=f"{execution_time:.4f}s"
)
raise
# 快速响应方法
def success(self, data: Any = None, message: str = "操作成功") -> Dict[str, Any]:
"""成功响应"""
return {
"success": True,
"message": message,
"data": data,
"timestamp": time.time()
}
def error(self, message: str = "操作失败", code: str = "UNKNOWN_ERROR") -> Dict[str, Any]:
"""错误响应"""
return {
"success": False,
"message": message,
"code": code,
"timestamp": time.time()
}
def paginated_response(self, items: List[Any], page: int, size: int, total: int) -> Dict[str, Any]:
"""分页响应"""
return self.success({
"items": items,
"pagination": {
"page": page,
"size": size,
"total": total,
"pages": (total + size - 1) // size
}
}, "数据获取成功")
def _register_websocket(self, method, websocket_config: Dict[str, Any]):
"""注册WebSocket端点"""
path = websocket_config['path']
# 注册WebSocket路由
self.router.add_websocket_route(
path=path,
endpoint=method,
name=method.__name__
)
self.logger.info(
f"WebSocket路由注册成功",
path=f"{self.router_prefix}{path}",
handler=method.__name__
)
# 路由装饰器
def route(method: str, path: str = "/", response_model: Optional[type] = None,
summary: Optional[str] = None, tags: Optional[List[str]] = None):
"""
路由装饰器
Args:
method: HTTP方法 (GET, POST, PUT, DELETE 等)
path: 路径,相对于模块前缀
response_model: 响应模型
summary: API摘要
tags: 标签列表
"""
def decorator(func):
func.__route_config__ = {
'method': method.upper(),
'path': path,
'response_model': response_model,
'summary': summary or func.__name__,
'tags': tags
}
return func
return decorator
# 便捷装饰器
def get(path: str = "/", response_model: Optional[type] = None, **kwargs):
"""GET 路由装饰器"""
return route("GET", path, response_model, **kwargs)
def post(path: str = "/", response_model: Optional[type] = None, **kwargs):
"""POST 路由装饰器"""
return route("POST", path, response_model, **kwargs)
def put(path: str = "/", response_model: Optional[type] = None, **kwargs):
"""PUT 路由装饰器"""
return route("PUT", path, response_model, **kwargs)
def delete(path: str = "/", response_model: Optional[type] = None, **kwargs):
"""DELETE 路由装饰器"""
return route("DELETE", path, response_model, **kwargs)
# 通用请求/响应模型
class BaseResponse(BaseModel):
"""基础响应模型"""
success: bool = True
message: str
timestamp: float
class ErrorResponse(BaseModel):
"""错误响应模型"""
success: bool = False
message: str
code: str
timestamp: float
class DataResponse(BaseResponse):
"""数据响应模型"""
data: Any
class PaginatedResponse(BaseResponse):
"""分页响应模型"""
data: Dict[str, Any] # 包含 items 和 pagination
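To illustrate how base.py is meant to be consumed, here is a minimal, hypothetical business module (the file name hello.py and its routes are invented for this sketch; per _auto_register_routes, the file name becomes the /hello prefix and the module-level instance is what the discovery system picks up):

# src/api/modules/hello.py (hypothetical example)
from typing import Optional
from src.api.internal.base import BaseAPI, get, post


class HelloAPI(BaseAPI):
    """Mounted under /hello because the file is named hello.py."""

    @get("/", summary="Say hello")
    async def say_hello(self, name: Optional[str] = None):
        # self.success() wraps the payload in the standard response envelope
        return self.success({"greeting": f"Hello, {name or 'world'}!"})

    @post("/echo", summary="Echo a message")
    async def echo(self, message: str):
        return self.success({"echo": message})


# Module-level instance so auto-discovery registers it
hello_api = HelloAPI()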


@@ -0,0 +1,251 @@
"""
API 模块自动发现和注册系统
"""
import os
import importlib
import inspect
from pathlib import Path
from typing import List, Dict, Any, Type
from .base import BaseAPI
from ...utils import log_info, log_warning, log_error, get_logger
logger = get_logger(__name__)
class APIDiscovery:
"""API 模块自动发现和注册器"""
def __init__(self, api_package: str = "src.api"):
"""
初始化发现器
Args:
api_package: API 包路径
"""
self.api_package = api_package
self.api_modules: Dict[str, BaseAPI] = {}
self.discovered_modules: List[str] = []
def discover_and_register(self, app) -> Dict[str, Any]:
"""
发现并注册所有 API 模块
Returns:
注册结果统计
"""
log_info("开始自动发现 API 模块")
# 获取 API 目录
api_dir = Path(self.api_package.replace('.', '/'))
if not api_dir.exists():
log_error(f"API 目录不存在: {api_dir}")
return {"success": False, "error": "API directory not found"}
# 扫描 Python 文件
python_files = self._scan_python_files(api_dir)
log_info(f"发现 {len(python_files)} 个 Python 文件")
# 导入和注册模块
success_count = 0
error_count = 0
for file_path in python_files:
try:
module_name = self._get_module_name(file_path, api_dir)
# 跳过特殊模块
if self._should_skip_module(module_name):
log_info(f"跳过模块: {module_name}")
continue
# 导入模块
module = self._import_module(module_name)
# 查找 BaseAPI 子类
api_instances = self._find_api_instances(module)
if api_instances:
for instance in api_instances:
# 注册路由
app.include_router(instance.router)
self.api_modules[module_name] = instance
log_info(
f"API 模块注册成功",
module=module_name,
prefix=instance.router_prefix,
routes=len(instance.router.routes)
)
success_count += 1
self.discovered_modules.append(module_name)
else:
log_warning(f"模块中未找到 BaseAPI 子类: {module_name}")
except Exception as e:
error_count += 1
log_error(f"模块注册失败: {file_path.name}", error=str(e))
# 返回统计结果
result = {
"success": True,
"total_files": len(python_files),
"registered_modules": success_count,
"failed_modules": error_count,
"discovered_modules": self.discovered_modules,
"api_modules": list(self.api_modules.keys())
}
log_info(
f"API 模块发现完成",
**{k: v for k, v in result.items() if k != "api_modules"}
)
return result
def _scan_python_files(self, api_dir: Path) -> List[Path]:
"""扫描 API 目录中的 Python 文件"""
python_files = []
# 扫描 internal 子目录框架内置API如 monitoring.py
internal_dir = api_dir / "internal"
if internal_dir.exists():
for file_path in internal_dir.glob("*.py"):
if file_path.name.startswith('_') or file_path.name == '__pycache__':
continue
python_files.append(file_path)
# 扫描 modules 子目录用户自定义业务API
modules_dir = api_dir / "modules"
if modules_dir.exists():
for file_path in modules_dir.glob("*.py"):
if file_path.name.startswith('_') or file_path.name == '__pycache__':
continue
python_files.append(file_path)
return sorted(python_files)
def _get_module_name(self, file_path: Path, api_dir: Path) -> str:
"""从文件路径获取模块名"""
relative_path = file_path.relative_to(api_dir)
module_name = str(relative_path.with_suffix('')).replace(os.sep, '.')
return f"{self.api_package}.{module_name}"
def _should_skip_module(self, module_name: str) -> bool:
"""判断是否应该跳过模块"""
skip_patterns = [
'__init__',
'base',
'discovery',
'example' # 跳过旧的 example.py使用新的基类系统
]
return any(pattern in module_name for pattern in skip_patterns)
def _import_module(self, module_name: str):
"""动态导入模块"""
try:
return importlib.import_module(module_name)
except ImportError as e:
log_error(f"模块导入失败: {module_name}", error=str(e))
raise
def _find_api_instances(self, module) -> List[BaseAPI]:
"""在模块中查找 BaseAPI 实例"""
api_instances = []
# 检查模块级别的属性
for name, obj in inspect.getmembers(module):
if isinstance(obj, BaseAPI):
api_instances.append(obj)
# 如果没有找到实例,检查是否有类定义
if not api_instances:
for name, obj in inspect.getmembers(module, inspect.isclass):
if (issubclass(obj, BaseAPI) and
obj != BaseAPI and
obj.__module__ == module.__name__):
# 尝试实例化
try:
instance = obj()
api_instances.append(instance)
except Exception as e:
log_warning(f"API 类实例化失败: {name}", error=str(e))
return api_instances
def get_module_info(self) -> Dict[str, Any]:
"""获取已注册模块的信息"""
info = {}
for module_name, api_instance in self.api_modules.items():
info[module_name] = {
"prefix": api_instance.router_prefix,
"routes_count": len(api_instance.router.routes),
"routes": [
{
"path": route.path,
"methods": list(route.methods),
"summary": getattr(route.endpoint, '__doc__', 'No summary')
}
for route in api_instance.router.routes
]
}
return info
def reload_module(self, module_name: str, app):
"""重新加载指定模块"""
if module_name not in self.api_modules:
log_error(f"模块未找到: {module_name}")
return False
try:
# 重新导入模块
module = importlib.import_module(module_name)
# 查找新的 API 实例
new_instances = self._find_api_instances(module)
if new_instances:
# 移除旧路由FastAPI 不支持直接移除,需要重新创建应用)
# 这里只是更新实例,实际的路由更新需要重启应用
old_instance = self.api_modules[module_name]
self.api_modules[module_name] = new_instances[0]
log_info(f"模块重新加载成功: {module_name}")
return True
else:
log_error(f"重新加载后未找到 API 实例: {module_name}")
return False
except Exception as e:
log_error(f"模块重新加载失败: {module_name}", error=str(e))
return False
# 全局发现器实例
_discovery_instance = None
def get_discovery() -> APIDiscovery:
"""获取全局发现器实例"""
global _discovery_instance
if _discovery_instance is None:
_discovery_instance = APIDiscovery()
return _discovery_instance
def auto_register_routes(app):
"""自动注册路由的便捷函数"""
discovery = get_discovery()
return discovery.discover_and_register(app)
def get_registered_modules_info():
"""获取已注册模块信息的便捷函数"""
discovery = get_discovery()
return discovery.get_module_info()


@@ -0,0 +1,499 @@
from typing import Dict, Any, List, Set
import os
import sys
import time
import platform
from pathlib import Path
from fastapi import BackgroundTasks
from pydantic import BaseModel
from src.api.internal.base import BaseAPI, get, post, delete
from src.config import settings
# Pydantic模型
class BatchDownloadRequest(BaseModel):
"""批量下载请求模型"""
files: List[str] # 日志文件路径列表,相对路径
class BatchDeleteRequest(BaseModel):
"""批量删除请求模型"""
files: List[str] # 日志文件路径列表,相对路径
class MonitoringAPI(BaseAPI):
"""监控相关API"""
def _get_business_modules(self) -> Set[str]:
"""获取业务模块列表(从 src/api/modules 目录)"""
try:
# 获取业务模块目录
modules_dir = Path(__file__).parent.parent / "modules"
if not modules_dir.exists():
return set()
# 获取所有 Python 文件(排除 __init__.py 和 __pycache__
business_modules = set()
for file_path in modules_dir.glob("*.py"):
if file_path.name.startswith("__"):
continue
# 文件名(不含扩展名)就是模块名
module_name = file_path.stem
business_modules.add(module_name)
return business_modules
except Exception as e:
self.logger.warning(f"Failed to get business modules: {str(e)}")
return set()
def _is_business_log(self, log_name: str) -> bool:
"""判断是否为业务日志文件"""
# 日志文件名格式:{route_name}_success.log 或 {route_name}_error.log
# 去掉 _success 或 _error 后缀,获取路由名
if log_name.endswith("_success.log"):
route_name = log_name[:-12] # 去掉 "_success.log"
elif log_name.endswith("_error.log"):
route_name = log_name[:-10] # 去掉 "_error.log"
else:
# 其他格式的日志文件,可能是旧格式或特殊格式
# 尝试去掉 .log 后缀
if log_name.endswith(".log"):
route_name = log_name[:-4]
else:
return False
# 获取业务模块列表
business_modules = self._get_business_modules()
# 检查路由名是否在业务模块列表中
return route_name in business_modules
@get("/status", summary="获取服务器状态")
async def get_server_status(self):
"""
获取服务器基本状态信息
"""
try:
# 获取系统信息
hostname = platform.node() or os.environ.get('COMPUTERNAME', 'Unknown')
system_type = "Windows" if os.name == 'nt' else "Linux"
python_version = f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
system_info = {
"hostname": hostname,
"system": system_type,
"python_version": python_version,
"timestamp": time.time()
}
return self.success({
"system": system_info
}, "Get server status success")
except Exception as e:
import traceback
self.logger.error(f"Failed to get server status: {str(e)}")
self.logger.error(f"Error traceback: {traceback.format_exc()}")
return self.error(f"Failed to get server status: {str(e)}")
@get("/logs", summary="获取日志列表")
async def get_logs_list(self, date: str = None):
"""
获取日志列表,支持按时间分类
Args:
date: 日期,格式为YYYY-MM-DD;如不提供,则返回所有日期文件夹
"""
try:
log_dir = Path(settings.log_dir) if hasattr(settings, 'log_dir') else Path("./logs")
log_dir.mkdir(exist_ok=True)
if date:
# 获取指定日期的日志文件
date_log_dir = log_dir / date
if not date_log_dir.exists():
return self.success({
"logs": [],
"total": 0,
"date": date
}, f"No logs found for date {date}")
logs = []
for file in date_log_dir.glob("*.log"):
# 只包含业务模块的日志
if not self._is_business_log(file.name):
continue
stats = file.stat()
logs.append({
"name": file.name,
"path": str(file),
"relative_path": str(file.relative_to(log_dir)),
"size": stats.st_size,
"created_at": stats.st_ctime,
"modified_at": stats.st_mtime
})
# 按修改时间降序排序
logs.sort(key=lambda x: x["modified_at"], reverse=True)
return self.success({
"logs": logs,
"total": len(logs),
"date": date
}, f"Get logs list for date {date} success")
else:
# 获取所有日期文件夹
date_folders = []
for folder in log_dir.iterdir():
if folder.is_dir():
# 检查文件夹名是否为日期格式YYYY-MM-DD
try:
time.strptime(folder.name, "%Y-%m-%d")
# 获取文件夹下的业务日志文件数量(只统计业务模块的日志)
business_logs = [f for f in folder.glob("*.log") if f.is_file() and self._is_business_log(f.name)]
log_count = len(business_logs)
# 获取最后修改时间(只考虑业务日志)
max_mtime = max(
(f.stat().st_mtime for f in business_logs),
default=folder.stat().st_mtime
)
date_folders.append({
"date": folder.name,
"path": str(folder),
"log_count": log_count,
"last_modified": max_mtime
})
except ValueError:
# 不是日期格式,跳过
continue
# 按日期降序排序
date_folders.sort(key=lambda x: x["date"], reverse=True)
return self.success({
"dates": date_folders,
"total": len(date_folders)
}, "Get all log dates success")
except Exception as e:
self.logger.error("Failed to get logs list", error=str(e))
return self.error(f"Failed to get logs list: {str(e)}")
@get("/logs/{date}/{log_name}", summary="获取指定日期的日志内容")
async def get_log_content_by_date(self, date: str, log_name: str, entries: int = 10, mode: str = "latest"):
"""
获取指定日期的日志内容
Args:
date: 日期格式为YYYY-MM-DD
log_name: 日志文件名
entries: 返回的日志条目数量,默认10个
mode: 显示模式;latest表示最新的N个条目显示在顶部,oldest表示最早的N个条目显示在顶部,默认latest
"""
try:
log_dir = Path(settings.log_dir) if hasattr(settings, 'log_dir') else Path("./logs")
log_file = log_dir / date / log_name
if not log_file.exists():
return self.error("Log file not found", code="NOT_FOUND")
# 读取日志文件内容
with open(log_file, 'r', encoding='utf-8') as f:
content = f.read()
# 解析多行JSON日志条目
log_entries = []
current_entry = []
brace_count = 0
in_entry = False
for line in content.splitlines():
stripped_line = line.strip()
if not stripped_line:
continue
# 开始新的日志条目
if stripped_line.startswith('{'):
# 如果有未完成的条目,先保存它
if in_entry and current_entry:
# 检查当前条目是否有内容,如果有就保存(即使不完整)
if current_entry and any(current_entry):
log_entries.append('\n'.join(current_entry))
current_entry = [line]
brace_count = 1
in_entry = True
elif in_entry:
current_entry.append(line)
brace_count += stripped_line.count('{')
brace_count -= stripped_line.count('}')
# 完整的日志条目
if brace_count == 0:
log_entries.append('\n'.join(current_entry))
in_entry = False
# 如果还有未完成的条目,也要保存(可能是正在写入的最后一条日志)
if in_entry and current_entry and any(current_entry):
log_entries.append('\n'.join(current_entry))
result_entries = []
# 根据模式处理日志条目
if mode == "oldest":
# 最早的N个条目顺序不变
result_entries = log_entries[:entries]
else: # latest
# 最新的N个条目反转顺序最新的在顶部
latest_entries = log_entries[-entries:]
latest_entries.reverse()
result_entries = latest_entries
# 拼接成完整的日志内容,添加分隔线(独占一行)
separator = "\n" + "=" * 80 + "\n\n"
result_content = separator.join(result_entries) + "\n"
return self.success({
"log_name": log_name,
"date": date,
"entries": len(result_entries),
"content": result_content,
"mode": mode
}, "Get log content success")
except Exception as e:
self.logger.error("Failed to get log content", error=str(e))
return self.error(f"Failed to get log content: {str(e)}")
@get("/logs/{date}/{log_name}/download", summary="下载指定日期的日志文件")
async def download_log_by_date(self, date: str, log_name: str):
"""
下载指定日期的日志文件
Args:
date: 日期格式为YYYY-MM-DD
log_name: 日志文件名
"""
try:
from fastapi.responses import FileResponse
log_dir = Path(settings.log_dir) if hasattr(settings, 'log_dir') else Path("./logs")
log_file = log_dir / date / log_name
if not log_file.exists():
return self.error("Log file not found", code="NOT_FOUND")
return FileResponse(
log_file,
media_type="application/octet-stream",
filename=log_file.name
)
except Exception as e:
self.logger.error("Failed to download log file", error=str(e))
return self.error(f"Failed to download log file: {str(e)}")
@get("/logs/{log_name}", summary="获取日志内容")
async def get_log_content(self, log_name: str, lines: int = 100):
"""
获取日志文件内容
Args:
log_name: 日志文件名
lines: 返回的行数,默认100行
"""
try:
log_dir = Path(settings.log_dir) if hasattr(settings, 'log_dir') else Path("./logs")
log_file = log_dir / log_name
if not log_file.exists():
return self.error("Log file not found", code="NOT_FOUND")
# 读取日志文件的最后N行
with open(log_file, 'r', encoding='utf-8') as f:
content = f.readlines()
# 返回最后lines行
content = content[-lines:]
return self.success({
"log_name": log_name,
"lines": len(content),
"content": "".join(content)
}, "Get log content success")
except Exception as e:
self.logger.error("Failed to get log content", error=str(e))
return self.error(f"Failed to get log content: {str(e)}")
@get("/logs/{log_name}/download", summary="下载日志文件")
async def download_log(self, log_name: str):
"""
下载日志文件
Args:
log_name: 日志文件名,支持带日期路径,如YYYY-MM-DD/filename.log
"""
try:
from fastapi.responses import FileResponse
log_dir = Path(settings.log_dir) if hasattr(settings, 'log_dir') else Path("./logs")
log_file = log_dir / log_name
if not log_file.exists():
return self.error("Log file not found", code="NOT_FOUND")
return FileResponse(
log_file,
media_type="application/octet-stream",
filename=log_file.name
)
except Exception as e:
self.logger.error("Failed to download log file", error=str(e))
return self.error(f"Failed to download log file: {str(e)}")
@post("/logs/download", summary="批量下载日志文件")
async def batch_download_logs(self, request: BatchDownloadRequest):
"""
批量下载日志文件
Args:
request: 批量下载请求,包含日志文件路径列表
"""
try:
import zipfile
import tempfile
from fastapi.responses import FileResponse
from starlette.background import BackgroundTask
files = request.files
if not files:
return self.error("No files selected", code="BAD_REQUEST")
log_dir = Path(settings.log_dir) if hasattr(settings, 'log_dir') else Path("./logs")
# 创建临时ZIP文件
with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as temp_zip:
temp_zip_path = temp_zip.name
# 写入日志文件到ZIP
with zipfile.ZipFile(temp_zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
for file_path in files:
abs_file_path = log_dir / file_path
if abs_file_path.exists():
# 保持原始目录结构
zipf.write(abs_file_path, arcname=file_path)
# 返回ZIP文件
return FileResponse(
temp_zip_path,
media_type="application/zip",
filename=f"logs_{time.strftime('%Y%m%d_%H%M%S')}.zip",
background=BackgroundTask(lambda: Path(temp_zip_path).unlink(missing_ok=True))  # delete the temp zip after the response is sent
)
except Exception as e:
self.logger.error("Failed to batch download logs", error=str(e))
return self.error(f"Failed to batch download logs: {str(e)}")
@delete("/logs/{log_name}", summary="删除单个日志文件")
async def delete_log(self, log_name: str):
"""
删除单个日志文件
Args:
log_name: 日志文件名,支持带日期路径,如YYYY-MM-DD/filename.log
"""
try:
log_dir = Path(settings.log_dir) if hasattr(settings, 'log_dir') else Path("./logs")
log_file = log_dir / log_name
if not log_file.exists():
return self.error("Log file not found", code="NOT_FOUND")
# 删除日志文件
log_file.unlink()
return self.success({
"log_name": log_name,
"deleted": True
}, "Log file deleted successfully")
except Exception as e:
self.logger.error(f"Failed to delete log file {log_name}", error=str(e))
return self.error(f"Failed to delete log file: {str(e)}")
@delete("/logs/{date}/{log_name}", summary="删除指定日期的单个日志文件")
async def delete_log_by_date(self, date: str, log_name: str):
"""
删除指定日期的单个日志文件
Args:
date: 日期格式为YYYY-MM-DD
log_name: 日志文件名
"""
try:
log_dir = Path(settings.log_dir) if hasattr(settings, 'log_dir') else Path("./logs")
log_file = log_dir / date / log_name
if not log_file.exists():
return self.error("Log file not found", code="NOT_FOUND")
# 删除日志文件
log_file.unlink()
return self.success({
"log_name": log_name,
"date": date,
"deleted": True
}, "Log file deleted successfully")
except Exception as e:
self.logger.error(f"Failed to delete log file {date}/{log_name}", error=str(e))
return self.error(f"Failed to delete log file: {str(e)}")
@post("/logs/batch/delete", summary="批量删除日志文件")
async def batch_delete_logs(self, request: BatchDeleteRequest):
"""
批量删除日志文件
Args:
request: 批量删除请求,包含日志文件路径列表
"""
try:
files = request.files
if not files:
return self.error("No files selected", code="BAD_REQUEST")
log_dir = Path(settings.log_dir) if hasattr(settings, 'log_dir') else Path("./logs")
# 统计删除结果
deleted_count = 0
failed_count = 0
failed_files = []
# 批量删除日志文件
for file_path in files:
abs_file_path = log_dir / file_path
if abs_file_path.exists():
try:
abs_file_path.unlink()
deleted_count += 1
except Exception as e:
self.logger.error(f"Failed to delete log file {file_path}", error=str(e))
failed_count += 1
failed_files.append({
"file": file_path,
"error": str(e)
})
else:
failed_count += 1
failed_files.append({
"file": file_path,
"error": "File not found"
})
return self.success({
"total": len(files),
"deleted": deleted_count,
"failed": failed_count,
"failed_files": failed_files
}, f"Batch delete completed. Deleted: {deleted_count}, Failed: {failed_count}")
except Exception as e:
self.logger.error("Failed to batch delete logs", error=str(e))
return self.error(f"Failed to batch delete logs: {str(e)}")
# 创建API实例
monitoring_api = MonitoringAPI()
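Since the file is named monitoring.py, BaseAPI derives the /monitoring prefix for the routes above. A rough client-side sketch using the requests library (requests is not in requirements.txt, and the port and log file names below are placeholders; adjust them to your .env):

import requests

BASE = "http://localhost:1112"  # default port from settings.py; override via PORT in .env

# Server status and the list of dated log folders
print(requests.get(f"{BASE}/monitoring/status").json())
print(requests.get(f"{BASE}/monitoring/logs").json())

# Batch-delete two log files (paths are relative to the log directory)
payload = {"files": ["2026-01-12/hello_error.log", "2026-01-12/hello_success.log"]}
print(requests.post(f"{BASE}/monitoring/logs/batch/delete", json=payload).json())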


@@ -0,0 +1,9 @@
"""
日志管理API模块
提供日志文件的管理、搜索、下载等功能
"""
from .routes import router
__all__ = ["router"]


@@ -0,0 +1,289 @@
from fastapi import APIRouter, HTTPException, Depends, Query
from fastapi.responses import FileResponse, JSONResponse
from typing import Optional, List, Dict, Any
from datetime import datetime, timedelta
from pathlib import Path
import os
import json
from ..utils.advanced_logger import advanced_logger_manager
from ..utils import get_logger
router = APIRouter()
logger = get_logger(__name__)
@router.get("/stats")
async def get_log_stats():
"""获取日志统计信息"""
try:
stats = advanced_logger_manager.get_log_stats()
return {
"success": True,
"data": stats
}
except Exception as e:
logger.error(f"Failed to get log stats: {str(e)}")
raise HTTPException(status_code=500, detail="Failed to retrieve log statistics")
@router.get("/cleanup")
async def cleanup_logs(days: Optional[int] = Query(None, description="删除多少天前的日志,默认使用配置值")):
"""清理过期日志文件"""
try:
result = advanced_logger_manager.cleanup_old_logs(days)
return {
"success": True,
"message": "Log cleanup completed successfully",
"data": result
}
except Exception as e:
logger.error(f"Failed to cleanup logs: {str(e)}")
raise HTTPException(status_code=500, detail="Failed to cleanup logs")
@router.get("/directories")
async def get_log_directories():
"""获取所有日志日期目录"""
try:
logs_dir = Path(advanced_logger_manager.logs_dir)
directories = []
for date_dir in logs_dir.iterdir():
if not date_dir.is_dir():
continue
try:
# 验证日期格式
datetime.strptime(date_dir.name, "%Y-%m-%d")
# 获取目录信息
dir_stats = {
"name": date_dir.name,
"path": str(date_dir),
"size_bytes": 0,
"file_count": 0,
"files": []
}
# 统计文件信息
for log_file in date_dir.glob("*.log"):
file_stats = log_file.stat()
dir_stats["file_count"] += 1
dir_stats["size_bytes"] += file_stats.st_size
dir_stats["files"].append({
"name": log_file.name,
"size_bytes": file_stats.st_size,
"modified": datetime.fromtimestamp(file_stats.st_mtime).isoformat()
})
dir_stats["size_mb"] = round(dir_stats["size_bytes"] / (1024 * 1024), 2)
directories.append(dir_stats)
except ValueError:
# 跳过非日期格式的目录
continue
# 按日期排序(最新的在前)
directories.sort(key=lambda x: x["name"], reverse=True)
return {
"success": True,
"data": {
"directories": directories,
"total_directories": len(directories),
"base_path": str(logs_dir)
}
}
except Exception as e:
logger.error(f"Failed to get log directories: {str(e)}")
raise HTTPException(status_code=500, detail="Failed to retrieve log directories")
@router.get("/files/{date}/{route_name}")
async def get_log_file(date: str, route_name: str, lines: Optional[int] = Query(100, ge=1, le=10000, description="返回的行数")):
"""获取指定日期和路由的日志文件内容"""
try:
# 验证日期格式
datetime.strptime(date, "%Y-%m-%d")
# 验证路由名
if not route_name.replace('_', '').replace('-', '').isalnum():
raise ValueError("Invalid route name")
log_path = advanced_logger_manager.get_route_log_path(route_name, datetime.strptime(date, "%Y-%m-%d"))
if not log_path.exists():
raise HTTPException(status_code=404, detail=f"Log file not found: {route_name}.log for date {date}")
# 读取文件内容
try:
with open(log_path, 'r', encoding='utf-8') as f:
file_lines = f.readlines()
# 取最后N行
if len(file_lines) > lines:
file_lines = file_lines[-lines:]
# 解析JSON日志行
parsed_lines = []
for line in file_lines:
line = line.strip()
if line:
try:
parsed_line = json.loads(line)
parsed_lines.append(parsed_line)
except json.JSONDecodeError:
# 如果不是JSON格式保持原样
parsed_lines.append({"raw": line})
return {
"success": True,
"data": {
"file": str(log_path),
"date": date,
"route_name": route_name,
"total_lines": len(file_lines),
"showed_lines": len(parsed_lines),
"lines": parsed_lines
}
}
except Exception as e:
logger.error(f"Failed to read log file {log_path}: {str(e)}")
raise HTTPException(status_code=500, detail=f"Failed to read log file: {str(e)}")
except ValueError as e:
raise HTTPException(status_code=400, detail=f"Invalid date format: {str(e)}")
except HTTPException:
raise
except Exception as e:
logger.error(f"Unexpected error getting log file: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/download/{date}/{route_name}")
async def download_log_file(date: str, route_name: str):
"""下载指定的日志文件"""
try:
# 验证日期格式
datetime.strptime(date, "%Y-%m-%d")
# 验证路由名
if not route_name.replace('_', '').replace('-', '').isalnum():
raise ValueError("Invalid route name")
log_path = advanced_logger_manager.get_route_log_path(route_name, datetime.strptime(date, "%Y-%m-%d"))
if not log_path.exists():
raise HTTPException(status_code=404, detail=f"Log file not found: {route_name}.log for date {date}")
return FileResponse(
path=str(log_path),
filename=f"{date}_{route_name}.log",
media_type="text/plain"
)
except ValueError as e:
raise HTTPException(status_code=400, detail=f"Invalid date format: {str(e)}")
except HTTPException:
raise
except Exception as e:
logger.error(f"Unexpected error downloading log file: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/search")
async def search_logs(
date: str = Query(..., description="搜索日期 (YYYY-MM-DD)"),
route_name: Optional[str] = Query(None, description="路由名称"),
keyword: Optional[str] = Query(None, description="搜索关键词"),
level: Optional[str] = Query(None, description="日志级别 (DEBUG, INFO, WARNING, ERROR, CRITICAL)"),
limit: int = Query(100, ge=1, le=1000, description="返回结果数量限制")
):
"""搜索日志内容"""
try:
# 验证日期格式
search_date = datetime.strptime(date, "%Y-%m-%d")
# 确定搜索范围
if route_name:
# 搜索特定路由的日志
log_files = [advanced_logger_manager.get_route_log_path(route_name, search_date)]
else:
# 搜索所有路由的日志
date_dir = advanced_logger_manager.get_date_log_dir(search_date)
log_files = list(date_dir.glob("*.log")) if date_dir.exists() else []
results = []
for log_file in log_files:
if not log_file.exists():
continue
try:
with open(log_file, 'r', encoding='utf-8') as f:
for line_num, line in enumerate(f, 1):
line = line.strip()
if not line:
continue
try:
log_entry = json.loads(line)
# 应用过滤条件
if level and log_entry.get("level", "").upper() != level.upper():
continue
if keyword:
# 在消息中搜索关键词
message = str(log_entry.get("message", "")) + str(log_entry.get("event", ""))
if keyword.lower() not in message.lower():
continue
# 添加文件信息
log_entry["file_info"] = {
"file": log_file.name,
"line_number": line_num
}
results.append(log_entry)
# 检查是否达到限制
if len(results) >= limit:
break
except json.JSONDecodeError:
# 非JSON格式的行跳过
continue
if len(results) >= limit:
break
except Exception as e:
logger.warning(f"Failed to search in file {log_file}: {str(e)}")
continue
# 按时间戳排序(最新的在前)
results.sort(key=lambda x: x.get("timestamp", ""), reverse=True)
return {
"success": True,
"data": {
"search_criteria": {
"date": date,
"route_name": route_name,
"keyword": keyword,
"level": level,
"limit": limit
},
"total_found": len(results),
"results": results
}
}
except ValueError as e:
raise HTTPException(status_code=400, detail=f"Invalid date format: {str(e)}")
except Exception as e:
logger.error(f"Failed to search logs: {str(e)}")
raise HTTPException(status_code=500, detail="Failed to search logs")


@@ -0,0 +1,11 @@
from fastapi import APIRouter
from .management import router as management_router
# 创建主路由器
router = APIRouter(prefix="/logs", tags=["日志管理"])
# 包含所有子路由
router.include_router(management_router)
# 导出路由器
__all__ = ["router"]


@@ -0,0 +1,20 @@
"""
业务模块目录
此目录用于存放用户自定义的业务API模块。
框架会自动扫描此目录并注册所有继承 BaseAPI 的模块。
示例:
from src.api.base import BaseAPI, route
class MyAPI(BaseAPI):
router_prefix = "/myapi"
router_tags = ["MyAPI"]
@route.get("/hello")
async def hello(self):
return {"message": "Hello!"}
# 创建实例供自动发现
my_api = MyAPI()
"""


@@ -0,0 +1,501 @@
"""
数据集管理 API 模块
提供数据集上传、列表、删除等功能
"""
from typing import List, Optional
from fastapi import UploadFile, File, Form
from src.api.internal.base import BaseAPI, post, get, delete
from src.models.response import StandardResponse
from src.services.file_upload import file_upload_service
def format_file_size(size_bytes: int) -> str:
"""
格式化文件大小显示
Args:
size_bytes: 文件大小(字节)
Returns:
str: 格式化后的文件大小字符串
"""
if size_bytes < 1024:
return f"{size_bytes} B"
elif size_bytes < 1024 * 1024:
size_kb = round(size_bytes / 1024, 2)
return f"{size_kb} KB"
else:
size_mb = round(size_bytes / 1024 / 1024, 2)
return f"{size_mb} MB"
class DatasetAPI(BaseAPI):
"""数据集管理 API - 自动注册到 /api/datasets 路径"""
def __init__(self):
# 重写初始化逻辑以设置正确的路由前缀
# 1. 手动设置 module_name
self.module_name = "api.datasets"
# 2. 创建路由器(使用期望的前缀)
from fastapi import APIRouter
self.router_prefix = "/api/datasets"
self.router = APIRouter(
prefix=self.router_prefix,
tags=["Datasets"]
)
# 3. 获取日志器
from src.utils.logger import get_logger
self.logger = get_logger(self.__class__.__module__)
# 4. 调用基类的自动注册(此时router已被覆盖)
# 注意:我们不调用父类__init__,而是手动调用_auto_register_routes
self._auto_register_routes()
# 5. 记录初始化
self.logger.info(
f"API模块初始化完成",
module=self.module_name,
prefix=self.router_prefix,
routes=len(self.router.routes)
)
@post("/upload", response_model=StandardResponse)
async def upload_dataset(
self,
file: UploadFile = File(...),
description: Optional[str] = Form(None)
):
"""
上传数据集文件
Args:
file: 上传的文件(支持 .json, .jsonl 格式)
description: 文件描述(可选)
Returns:
StandardResponse: 包含上传结果的标准响应
"""
try:
# 验证文件类型
filename = file.filename or "unknown"
file_ext = filename.lower().split('.')[-1] if '.' in filename else ''
if file_ext not in ['json', 'jsonl']:
return StandardResponse.error("只支持 .json 和 .jsonl 格式的文件")
# 读取文件内容
file_content = await file.read()
# 如果未提供描述,使用默认描述
if not description:
description = f"用户上传的数据集文件: {filename}"
# 使用文件上传服务上传文件
uploaded_file = await file_upload_service.upload_file(
file_content=file_content,
original_filename=filename,
content_type=file.content_type,
description=description
)
# 转换为前端期望的格式
# 显示真实文件名(从映射文件中获取)
mapping = file_upload_service.get_filename_mapping(uploaded_file.file_id)
display_name = mapping["original_filename"] if mapping else uploaded_file.original_filename
# 格式化文件大小
size_mb = round(uploaded_file.file_size / 1024 / 1024, 2)
size_display = format_file_size(uploaded_file.file_size)
dataset_info = {
"file_id": uploaded_file.file_id,
"name": display_name,
"size": uploaded_file.file_size,
"size_mb": size_mb,
"size_display": size_display,
"status": "已处理", # 默认状态
"uploaded_at": uploaded_file.uploaded_at,
"description": uploaded_file.description
}
return StandardResponse.success({
"message": "数据集上传成功",
"dataset": dataset_info
})
except ValueError as e:
return StandardResponse.error(str(e))
except Exception as e:
return StandardResponse.error(f"上传失败: {str(e)}")
@get("", response_model=StandardResponse)
async def list_datasets(self, list_all: bool = False):
"""
获取所有数据集列表
Args:
list_all: 是否列出data目录下的所有文件(物理文件);默认False,只列出API上传的文件
Returns:
StandardResponse: 包含数据集列表的标准响应
"""
try:
if list_all:
# 列出data目录下的所有文件物理文件
import json
from pathlib import Path
data_dir = file_upload_service.upload_dir
mapping_file = data_dir / "filename_mapping.json"
# 读取文件名映射
mappings = {}
if mapping_file.exists():
try:
with open(mapping_file, 'r', encoding='utf-8') as f:
mapping_data = json.load(f)
mappings = mapping_data.get("mappings", {})
except Exception:
mappings = {}
# 获取data目录下的所有JSON文件
datasets = []
if data_dir.exists():
for file_path in data_dir.iterdir():
# 跳过目录和映射文件本身
if file_path.is_file() and file_path.name != "filename_mapping.json":
file_id = file_path.stem # 去掉.json后缀得到file_id
# 从映射文件获取真实文件名
mapping_info = mappings.get(file_id, {})
original_filename = mapping_info.get("original_filename", file_path.name)
uploaded_at = mapping_info.get("uploaded_at", "")
# 获取文件大小
file_size = file_path.stat().st_size
# 格式化文件大小
size_mb = round(file_size / 1024 / 1024, 2)
size_display = format_file_size(file_size)
datasets.append({
"file_id": file_id,
"name": original_filename,
"size": file_size,
"size_mb": size_mb,
"size_display": size_display,
"status": "已处理",
"description": mapping_info.get("original_filename", "") if mapping_info else "",
"uploaded_at": uploaded_at,
"download_count": 0,
"is_physical_file": True
})
# 按文件名排序
datasets.sort(key=lambda x: x["name"])
return StandardResponse.success({
"datasets": datasets,
"total": len(datasets),
"source": "physical_files"
})
else:
# 获取所有文件API上传的文件
all_files = file_upload_service.get_all_files()
# 转换为前端期望的格式
datasets = []
for uploaded_file in all_files:
# 只返回JSON/JSONL文件数据集文件
file_ext = uploaded_file.original_filename.lower().split('.')[-1] if '.' in uploaded_file.original_filename else ''
if file_ext in ['json', 'jsonl']:
# 获取文件名映射(显示真实文件名)
mapping = file_upload_service.get_filename_mapping(uploaded_file.file_id)
display_name = mapping["original_filename"] if mapping else uploaded_file.original_filename
# 格式化文件大小
size_mb = round(uploaded_file.file_size / 1024 / 1024, 2)
size_display = format_file_size(uploaded_file.file_size)
datasets.append({
"file_id": uploaded_file.file_id,
"name": display_name,
"size": uploaded_file.file_size,
"size_mb": size_mb,
"size_display": size_display,
"status": "已处理",
"description": uploaded_file.description or "",
"uploaded_at": uploaded_file.uploaded_at,
"download_count": uploaded_file.download_count,
"is_physical_file": False
})
return StandardResponse.success({
"datasets": datasets,
"total": len(datasets),
"source": "api_uploaded"
})
except Exception as e:
return StandardResponse.error(f"获取数据集列表失败: {str(e)}")
@get("/{file_id}", response_model=StandardResponse)
async def get_dataset(self, file_id: str):
"""
获取特定数据集的详细信息
Args:
file_id: 文件ID
Returns:
StandardResponse: 包含数据集详情的标准响应
"""
try:
file_info = file_upload_service.get_file(file_id)
if not file_info:
return StandardResponse.error(f"数据集 {file_id} 不存在")
# 转换为前端期望的格式
# 显示真实文件名(从映射文件中获取)
mapping = file_upload_service.get_filename_mapping(file_info.file_id)
display_name = mapping["original_filename"] if mapping else file_info.original_filename
# 格式化文件大小
size_mb = round(file_info.file_size / 1024 / 1024, 2)
size_display = format_file_size(file_info.file_size)
dataset_info = {
"file_id": file_info.file_id,
"name": display_name,
"size": file_info.file_size,
"size_mb": size_mb,
"size_display": size_display,
"status": "已处理",
"description": file_info.description or "",
"uploaded_at": file_info.uploaded_at,
"updated_at": file_info.updated_at,
"download_count": file_info.download_count,
"content_type": file_info.content_type,
"file_hash": file_info.file_hash
}
return StandardResponse.success(dataset_info)
except Exception as e:
return StandardResponse.error(f"获取数据集详情失败: {str(e)}")
@get("/{file_id}", response_model=StandardResponse)
async def get_dataset(self, file_id: str):
"""
获取特定数据集的详细信息
Args:
file_id: 文件ID
Returns:
StandardResponse: 包含数据集详情的标准响应
"""
try:
file_info = file_upload_service.get_file(file_id)
if not file_info:
return StandardResponse.error(f"数据集 {file_id} 不存在")
# 转换为前端期望的格式
# 显示真实文件名(从映射文件中获取)
mapping = file_upload_service.get_filename_mapping(file_info.file_id)
display_name = mapping["original_filename"] if mapping else file_info.original_filename
# 格式化文件大小
size_mb = round(file_info.file_size / 1024 / 1024, 2)
size_display = format_file_size(file_info.file_size)
dataset_info = {
"file_id": file_info.file_id,
"name": display_name,
"size": file_info.file_size,
"size_mb": size_mb,
"size_display": size_display,
"status": "已处理",
"description": file_info.description or "",
"uploaded_at": file_info.uploaded_at,
"updated_at": file_info.updated_at,
"download_count": file_info.download_count,
"content_type": file_info.content_type,
"file_hash": file_info.file_hash
}
return StandardResponse.success(dataset_info)
except Exception as e:
return StandardResponse.error(f"获取数据集详情失败: {str(e)}")
@get("/{file_id}/content", response_model=StandardResponse)
async def get_dataset_content(self, file_id: str, limit: int = 5):
"""
获取数据集文件内容前N条记录
Args:
file_id: 文件ID
limit: 返回的记录数量,默认5条
Returns:
StandardResponse: 包含数据集内容的标准响应
"""
try:
import json
import jsonlines
# 获取文件信息
file_info = file_upload_service.get_file(file_id)
if not file_info:
return StandardResponse.error(f"数据集 {file_id} 不存在")
# 获取文件路径
file_path = file_upload_service.get_file_path(file_id)
if not file_path or not file_path.exists():
return StandardResponse.error(f"文件 {file_id} 不存在")
# 读取文件内容
content_preview = []
filename = file_info.original_filename.lower()
try:
if filename.endswith('.jsonl'):
# 处理JSONL格式
with jsonlines.open(file_path) as reader:
count = 0
for item in reader:
if count >= limit:
break
content_preview.append(item)
count += 1
else:
# 处理JSON格式
with open(file_path, 'r', encoding='utf-8') as f:
data = json.load(f)
if isinstance(data, list):
# 如果是数组取前N条
content_preview = data[:limit]
else:
# 如果是对象,直接返回
content_preview = data
except json.JSONDecodeError as e:
return StandardResponse.error(f"JSON文件格式错误: {str(e)}")
except Exception as e:
return StandardResponse.error(f"读取文件内容失败: {str(e)}")
# 获取真实文件名(从映射文件中获取)
mapping = file_upload_service.get_filename_mapping(file_id)
display_filename = mapping["original_filename"] if mapping else file_info.original_filename
return StandardResponse.success({
"file_id": file_id,
"filename": display_filename,
"total_records": len(content_preview),
"preview": content_preview
})
except Exception as e:
return StandardResponse.error(f"获取数据集内容失败: {str(e)}")
@delete("/{file_id}", response_model=StandardResponse)
async def delete_dataset(self, file_id: str):
"""
删除数据集
Args:
file_id: 文件ID
Returns:
StandardResponse: 包含删除结果的标准响应
"""
try:
if not file_upload_service.file_exists(file_id):
return StandardResponse.error(f"数据集 {file_id} 不存在")
success = file_upload_service.delete_file(file_id)
if success:
return StandardResponse.success({
"message": f"数据集 {file_id} 已删除"
})
else:
return StandardResponse.error(f"删除数据集 {file_id} 失败")
except Exception as e:
return StandardResponse.error(f"删除数据集失败: {str(e)}")
@get("/list-files", response_model=StandardResponse)
async def list_data_files(self):
"""
查询data目录下的文件列表
Returns:
StandardResponse: 包含文件列表的标准响应
"""
try:
import json
import os
from pathlib import Path
data_dir = file_upload_service.upload_dir
mapping_file = data_dir / "filename_mapping.json"
# 读取文件名映射
mappings = {}
if mapping_file.exists():
try:
with open(mapping_file, 'r', encoding='utf-8') as f:
mapping_data = json.load(f)
mappings = mapping_data.get("mappings", {})
except Exception:
mappings = {}
# 获取data目录下的所有JSON文件
files_info = []
if data_dir.exists():
for file_path in data_dir.iterdir():
# 跳过目录和映射文件本身
if file_path.is_file() and file_path.name != "filename_mapping.json":
file_id = file_path.stem # 去掉.json后缀得到file_id
# 从映射文件获取真实文件名
mapping_info = mappings.get(file_id, {})
original_filename = mapping_info.get("original_filename", file_path.name)
uploaded_at = mapping_info.get("uploaded_at", "")
# 获取文件大小
file_size = file_path.stat().st_size
files_info.append({
"file_id": file_id,
"original_filename": original_filename,
"storage_filename": file_path.name,
"file_path": str(file_path),
"file_size": file_size,
"file_size_mb": round(file_size / 1024 / 1024, 2),
"uploaded_at": uploaded_at,
"exists_in_mapping": file_id in mappings
})
# 按文件名排序
files_info.sort(key=lambda x: x["original_filename"])
return StandardResponse.success({
"total": len(files_info),
"files": files_info
})
except Exception as e:
return StandardResponse.error(f"查询文件列表失败: {str(e)}")
# 创建实例(自动发现系统会找到这个实例)
dataset_api = DatasetAPI()
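A hedged client sketch for the endpoints above, using requests (the sample.jsonl file and the base URL are placeholders). The multipart upload is what the python-multipart dependency in requirements.txt supports on the server side:

import requests

BASE = "http://localhost:1112"  # adjust to the PORT configured in .env

# Upload a .jsonl dataset as multipart/form-data
with open("sample.jsonl", "rb") as fh:
    resp = requests.post(
        f"{BASE}/api/datasets/upload",
        files={"file": ("sample.jsonl", fh, "application/json")},
        data={"description": "demo dataset"},
    )
print(resp.json())

# List uploaded datasets; list_all=true also includes physical files already in the data directory
print(requests.get(f"{BASE}/api/datasets", params={"list_all": "true"}).json())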


@@ -0,0 +1,3 @@
from .settings import settings
__all__ = ["settings"]


@@ -0,0 +1,46 @@
from pydantic_settings import BaseSettings
from typing import Optional
import os
class Settings(BaseSettings):
# 应用配置
app_name: str = "X-Request API Framework"
app_version: str = "1.0.0"
debug: bool = False
# 服务器配置
host: str = "0.0.0.0"
port: int = 1112
workers: int = 1
# 日志配置
log_level: str = "INFO"
log_file: Optional[str] = "logs/app.log" # 默认写入日志文件
log_format: str = "json" # json 或 console
log_to_console: bool = False # 是否同时输出到控制台
# 高级日志配置
advanced_logging: bool = True # 是否启用高级日志系统
logs_dir: str = "logs" # 日志目录
max_log_days: int = 30 # 日志文件保存天数
enable_log_cleanup: bool = True # 是否启用自动日志清理
route_based_logging: bool = True # 是否启用基于路由的日志分类
# 性能配置
max_requests: int = 1000
max_connections: int = 1000
request_timeout: int = 30
# CORS配置
cors_origins: list[str] = ["*"]
cors_methods: list[str] = ["*"]
cors_headers: list[str] = ["*"]
class Config:
env_file = ".env"
case_sensitive = False
# 全局设置实例
settings = Settings()
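Because Config sets env_file=".env" and case_sensitive=False, any of these fields can be overridden through the process environment or the .env file created by setup.sh. A small sketch of that behaviour:

import os

# Environment variables win over the defaults above; names are matched case-insensitively
os.environ["PORT"] = "9000"
os.environ["LOG_LEVEL"] = "DEBUG"

from src.config.settings import Settings

s = Settings()
print(s.port, s.log_level)  # -> 9000 DEBUG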


@@ -0,0 +1,3 @@
from .app import create_app, get_app
__all__ = ["create_app", "get_app"]

280
request/src/core/app.py Normal file

@@ -0,0 +1,280 @@
from fastapi import FastAPI, Request, Response
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
from fastapi.responses import JSONResponse
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from contextlib import asynccontextmanager
from threading import Lock
from typing import Dict, Any, Optional
import time
import uuid
from ..config import settings
from ..utils import get_logger
from ..middleware.logging import LoggingMiddleware
from ..middleware.error_handling import ErrorHandlingMiddleware
from ..middleware.performance import PerformanceMiddleware
from ..utils.advanced_logger import advanced_logger_manager
from ..utils.doc_generator import generate_docs
from ..models.response import StandardResponse
logger = get_logger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
"""应用生命周期管理"""
# 启动时执行
logger.info(
"应用启动",
app_name=settings.app_name,
version=settings.app_version,
debug=settings.debug,
host=settings.host,
port=settings.port
)
# 这里可以添加初始化数据库连接、缓存等操作
await initialize_app()
yield
# 关闭时执行
logger.info("应用关闭")
await cleanup_app()
async def initialize_app():
"""初始化应用"""
# 初始化高级日志管理器
if settings.advanced_logging:
logger.info("初始化高级日志系统")
try:
# 计划日志清理任务
if settings.enable_log_cleanup:
advanced_logger_manager.schedule_cleanup()
logger.info(f"日志自动清理已启用,保留天数: {settings.max_log_days}")
# 获取初始日志统计
stats = advanced_logger_manager.get_log_stats()
logger.info("日志系统初始化完成", log_stats=stats)
except Exception as e:
logger.error("高级日志系统初始化失败", error=str(e))
# 初始化数据库连接
# 初始化缓存
# 初始化其他资源
async def cleanup_app():
"""清理应用资源"""
# 清理高级日志管理器资源
if settings.advanced_logging:
logger.info("清理高级日志系统资源")
try:
# 清理日志管理器资源
from ..utils.advanced_logger import advanced_logger_manager
advanced_logger_manager.__del__()
logger.info("高级日志系统资源清理完成")
except Exception as e:
logger.error("高级日志系统资源清理失败", error=str(e))
# 关闭数据库连接
# 清理缓存
# 清理其他资源
def create_app() -> FastAPI:
"""创建FastAPI应用实例"""
# 创建FastAPI应用
app = FastAPI(
title=settings.app_name,
version=settings.app_version,
description="高性能、高并发的请求框架",
debug=settings.debug,
lifespan=lifespan,
docs_url="/docs", # Swagger UI 始终可用
redoc_url="/redoc", # ReDoc 始终可用
openapi_url="/openapi.json", # OpenAPI JSON 始终可用
)
# 添加中间件(注意顺序很重要)
setup_middleware(app)
# 添加异常处理器
setup_exception_handlers(app)
# 添加路由
setup_routes(app)
return app
def setup_middleware(app: FastAPI):
"""设置中间件"""
# CORS中间件
app.add_middleware(
CORSMiddleware,
allow_origins=settings.cors_origins,
allow_credentials=True,
allow_methods=settings.cors_methods,
allow_headers=settings.cors_headers,
)
# Gzip压缩中间件
app.add_middleware(GZipMiddleware, minimum_size=1000)
# 重新启用所有中间件
app.add_middleware(PerformanceMiddleware)
app.add_middleware(LoggingMiddleware)
app.add_middleware(ErrorHandlingMiddleware)
def setup_exception_handlers(app: FastAPI):
"""设置异常处理器"""
@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
"""全局异常处理器"""
request_id = getattr(request.state, "request_id", "unknown")
logger.error(
"未处理的异常",
request_id=request_id,
path=request.url.path,
method=request.method,
error=str(exc),
error_type=type(exc).__name__,
)
return JSONResponse(
status_code=500,
content={
"success": False,
"message": "服务器内部错误",
"request_id": request_id,
"error": str(exc) if settings.debug else "Internal Server Error"
}
)
@app.exception_handler(ValueError)
async def value_error_handler(request: Request, exc: ValueError):
"""值错误处理器"""
request_id = getattr(request.state, "request_id", "unknown")
logger.warning(
"值错误",
request_id=request_id,
path=request.url.path,
method=request.method,
error=str(exc)
)
return JSONResponse(
status_code=400,
content={
"success": False,
"message": "请求参数错误",
"request_id": request_id,
"error": str(exc)
}
)
def setup_routes(app: FastAPI):
"""设置路由"""
@app.get("/")
async def root():
"""根路径 - 重定向到前端监控界面"""
from fastapi.responses import RedirectResponse
return RedirectResponse(url="/dashboard")
@app.get("/dashboard")
async def dashboard():
"""前端监控界面"""
return FileResponse("static/index.html")
@app.get("/health")
async def health_check():
"""健康检查"""
response_data = {
"status": "healthy",
"service": settings.app_name
}
return StandardResponse.success(response_data)
@app.get("/info")
async def app_info():
"""应用信息"""
response_data = {
"app_name": settings.app_name,
"version": settings.app_version,
"debug": settings.debug,
"host": settings.host,
"port": settings.port
}
return StandardResponse.success(response_data)
# 添加路由信息接口
@app.get("/routes")
async def get_routes_info():
"""获取所有路由信息"""
from ..api.discovery import get_registered_modules_info
response_data = {
"app_routes": [
{
"path": route.path,
"methods": list(route.methods),
"name": route.name
}
for route in app.routes
# Mount 等路由对象有 path 但没有 methods一并判断避免 AttributeError
if hasattr(route, 'path') and hasattr(route, 'methods')
],
"api_modules": get_registered_modules_info()
}
return StandardResponse.success(response_data)
# 自动发现并注册 API 模块
from ..api.internal.discovery import auto_register_routes
registration_result = auto_register_routes(app)
# 挂载静态文件目录(放在最后,避免覆盖其他路由)
app.mount("/", StaticFiles(directory="static", html=True), name="static")
# 生成离线API文档
generate_docs(app)
if registration_result["success"]:
logger.info(
"API模块自动注册完成",
registered_modules=registration_result["registered_modules"],
total_files=registration_result["total_files"]
)
else:
logger.error("API模块自动注册失败", error=registration_result.get("error"))
# 应用实例缓存
_app_instance: Optional[FastAPI] = None
# 用于确保线程安全的锁
_app_lock = Lock()
# 应用工厂函数
def get_app() -> FastAPI:
"""获取应用实例(线程安全的单例模式)"""
global _app_instance
# 双重检查锁定模式,确保线程安全
if _app_instance is None:
with _app_lock:
if _app_instance is None:
_app_instance = create_app()
return _app_instance
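As a quick sanity check, the factory can be exercised with FastAPI's TestClient. This is a hedged sketch, not part of the commit; it assumes the test dependency (httpx) is installed, that it runs from the project directory containing the static/ folder the app mounts, and that the import path matches the actual package layout:

# Hypothetical smoke test for the application factory.
from fastapi.testclient import TestClient
from src.core.app import create_app   # import path is an assumption

with TestClient(create_app()) as client:      # create_app() gives a fresh instance; get_app() reuses the singleton
    resp = client.get("/health")
    assert resp.status_code == 200
    assert resp.json()["status"] == 1         # StandardResponse marks success with status=1
    # Headers added by the logging/performance middleware:
    print(resp.headers.get("X-Request-ID"), resp.headers.get("X-Process-Time"))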

View File

@@ -0,0 +1,5 @@
from .logging import LoggingMiddleware
from .error_handling import ErrorHandlingMiddleware
from .performance import PerformanceMiddleware
__all__ = ["LoggingMiddleware", "ErrorHandlingMiddleware", "PerformanceMiddleware"]

View File

@@ -0,0 +1,59 @@
from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.types import ASGIApp
import json
from datetime import datetime
from typing import Callable
from ..utils import get_logger
logger = get_logger(__name__)
class ErrorHandlingMiddleware(BaseHTTPMiddleware):
"""错误处理中间件"""
def __init__(self, app: ASGIApp):
super().__init__(app)
self.logger = get_logger("error_handling")
async def dispatch(self, request: Request, call_next: Callable) -> Response:
"""处理请求并捕获异常"""
try:
response = await call_next(request)
return response
except Exception as e:
request_id = getattr(request.state, "request_id", "unknown")
# 记录详细错误信息
self.logger.error(
"中间件捕获到未处理异常",
request_id=request_id,
method=request.method,
path=request.url.path,
query_params=str(request.query_params),
error_type=type(e).__name__,
error_message=str(e),
)
# 返回友好的错误响应
return self._create_error_response(
request_id=request_id,
error_message="服务器内部错误",
status_code=500
)
def _create_error_response(self, request_id: str, error_message: str, status_code: int) -> Response:
"""创建错误响应"""
error_data = {
"success": False,
"message": error_message,
"request_id": request_id,
"timestamp": json.dumps({"timestamp": "now"}) # 简单的时间戳
}
return Response(
content=json.dumps(error_data, ensure_ascii=False),
status_code=status_code,
media_type="application/json"
)

View File

@@ -0,0 +1,388 @@
from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.types import ASGIApp, Receive
import time
import uuid
import json
from typing import Callable, Optional
from ..utils import get_logger
from ..utils.advanced_logger import get_route_logger, advanced_logger_manager
from ..config import settings
logger = get_logger(__name__)
class LoggingMiddleware(BaseHTTPMiddleware):
"""请求日志中间件"""
def __init__(self, app: ASGIApp):
super().__init__(app)
self.logger = get_logger("request")
self.use_advanced_logging = settings.advanced_logging
self.use_route_based_logging = settings.route_based_logging
# 不记录日志的路径模式
SKIP_LOG_PATTERNS = (
'.html', '.css', '.js', '.ico', '.png', '.jpg', '.jpeg',
'.gif', '.svg', '.woff', '.woff2', '.ttf', '.eot', '.map'
)
# 不记录日志的路径前缀
SKIP_LOG_PREFIXES = (
'/static/', '/favicon', '/assets/', '/vendor/',
'/docs', '/redoc', '/openapi.json', '/.well-known/'
)
async def dispatch(self, request: Request, call_next: Callable) -> Response:
"""处理请求并记录日志"""
# 生成请求ID
request_id = str(uuid.uuid4())
request.state.request_id = request_id
# 检查是否需要跳过日志记录(静态资源)
path = request.url.path.lower()
skip_logging = (
path.endswith(self.SKIP_LOG_PATTERNS) or
any(path.startswith(prefix) for prefix in self.SKIP_LOG_PREFIXES)
)
# 记录请求开始时间
start_time = time.time()
# 获取客户端IP
client_ip = self._get_client_ip(request)
# 获取用户代理
user_agent = request.headers.get("user-agent", "Unknown")
# 获取路由名称
route_name = self._extract_route_name(request)
# 对于POST/PUT/PATCH请求需要缓存请求体以便后续使用
body_bytes = None
if request.method in ["POST", "PUT", "PATCH"]:
body_bytes = await request.body()
# 保存原始的 state 内容Starlette 的 State 把数据存放在内部的 _state 字典中)
original_state = dict(request.state._state)
# 重新创建请求对象,将缓存的请求体设置回去
# 使用闭包来跟踪是否已经返回了body
body_sent = False
async def receive() -> dict:
nonlocal body_sent
if not body_sent:
body_sent = True
return {"type": "http.request", "body": body_bytes}
else:
return {"type": "http.request", "body": b""}
# 创建新的 scope保留原始的 state
# 使用 dict() 创建新的字典,避免修改原始 scope
new_scope = dict(request.scope)
# 确保 state 存在
if "state" not in new_scope:
new_scope["state"] = {}
# 恢复原始的 state 内容
for key, value in original_state.items():
new_scope["state"][key] = value
new_request = Request(new_scope, receive)
request = new_request
# 不再记录请求开始日志,只在响应时记录一次
try:
# 处理请求
response = await call_next(request)
# 计算处理时间
process_time = time.time() - start_time
# 读取响应数据(需要复制响应,因为响应流只能读取一次)
response_body = b""
async for chunk in response.body_iterator:
response_body += chunk
# 创建新的响应对象,因为原始响应流已经被消耗
new_response = Response(
content=response_body,
status_code=response.status_code,
headers=dict(response.headers),
media_type=response.media_type
)
# 尝试解析响应体
response_data = None
if response_body:
try:
# 只解析JSON响应
if new_response.headers.get("content-type") and "application/json" in new_response.headers.get("content-type"):
response_data = json.loads(response_body.decode("utf-8"))
except Exception:
# 如果无法解析,跳过
pass
# 记录响应信息(现在会同时包含请求和响应的信息)
if not skip_logging:
if self.use_advanced_logging and self.use_route_based_logging:
await self._log_response_advanced(
request=request,
response=new_response,
response_data=response_data,
route_name=route_name,
request_id=request_id,
process_time=process_time,
client_ip=client_ip,
body_bytes=body_bytes # 传递缓存的请求体
)
else:
await self._log_response(
request=request,
response=new_response,
response_data=response_data,
request_id=request_id,
process_time=process_time,
client_ip=client_ip
)
# 添加响应头
new_response.headers["X-Request-ID"] = request_id
new_response.headers["X-Process-Time"] = f"{process_time:.4f}"
return new_response
except Exception as e:
# 计算处理时间
process_time = time.time() - start_time
# 静态资源异常也跳过日志记录
if not skip_logging:
if self.use_advanced_logging and self.use_route_based_logging:
route_logger = get_route_logger(route_name)
route_logger.log_request(
method=request.method,
path=request.url.path,
status_code=500,
process_time=process_time,
client_ip=client_ip,
request_id=request_id,
user_agent=user_agent,
query_params=dict(request.query_params),
error=str(e)
)
else:
self.logger.error(
"请求处理异常",
request_id=request_id,
method=request.method,
path=request.url.path,
query_params=str(request.query_params),
client_ip=client_ip,
process_time=f"{process_time:.4f}s",
error=str(e),
error_type=type(e).__name__
)
# 重新抛出异常,让其他中间件或异常处理器处理
raise
def _get_client_ip(self, request: Request) -> str:
"""获取客户端IP地址"""
# 检查代理头
forwarded_for = request.headers.get("X-Forwarded-For")
if forwarded_for:
# 取第一个IP如果有多级代理
return forwarded_for.split(",")[0].strip()
real_ip = request.headers.get("X-Real-IP")
if real_ip:
return real_ip
# 返回直连IP
return request.client.host if request.client else "unknown"
async def _log_request(self, request: Request, request_id: str, client_ip: str, user_agent: str, body_bytes: Optional[bytes] = None):
"""记录请求信息"""
# 过滤敏感查询参数
safe_query_params = self._filter_sensitive_params(dict(request.query_params))
log_data = {
"request_id": request_id,
"method": request.method,
"path": request.url.path,
"query_params": safe_query_params,
"client_ip": client_ip,
"user_agent": user_agent,
"content_type": request.headers.get("content-type"),
"content_length": request.headers.get("content-length"),
}
# 如果是POST/PUT/PATCH请求尝试记录请求体
if request.method in ["POST", "PUT", "PATCH"] and body_bytes:
try:
body = self._parse_request_body(body_bytes, request.headers.get("content-type", ""), request_id)
if body:
log_data["request_body"] = body
except Exception:
# 如果无法解析请求体,跳过
pass
self.logger.info(
f"收到请求: {request.method} {request.url.path}",
**log_data
)
async def _log_response(self, request: Request, response: Response, response_data: Optional[dict], request_id: str, process_time: float, client_ip: str):
"""记录响应信息"""
log_data = {
"request_id": request_id,
"method": request.method,
"path": request.url.path,
"status_code": response.status_code,
"process_time": f"{process_time:.4f}s",
"client_ip": client_ip,
"content_type": response.headers.get("content-type"),
"content_length": response.headers.get("content-length"),
}
# 如果有响应数据,添加到日志中
if response_data:
log_data["response_body"] = response_data
# 根据状态码确定日志级别
if response.status_code >= 500:
log_level = "error"
elif response.status_code >= 400:
log_level = "warning"
else:
log_level = "info"
message = f"请求完成: {request.method} {request.url.path} - {response.status_code} ({process_time:.4f}s)"
getattr(self.logger, log_level)(message, **log_data)
def _parse_request_body(self, body_bytes: bytes, content_type: str, request_id: str) -> Optional[dict]:
"""解析请求体(从字节数据)"""
try:
if not body_bytes:
return None
if "application/json" in content_type:
return json.loads(body_bytes.decode("utf-8"))
elif "application/x-www-form-urlencoded" in content_type:
# 对于表单数据返回原始字符串因为已经读取了body无法再使用request.form()
return {"raw": body_bytes.decode("utf-8")}
# 对于其他内容类型,返回基本信息
return {"type": content_type, "size": len(body_bytes)}
except Exception as e:
self.logger.warning(
"无法解析请求体",
request_id=request_id,
error=str(e)
)
return {"parse_error": str(e)}
def _filter_sensitive_params(self, params: dict) -> dict:
"""过滤敏感查询参数"""
sensitive_keys = {
"password", "token", "secret", "key", "auth", "authorization",
"api_key", "access_token", "refresh_token"
}
filtered_params = {}
for key, value in params.items():
if any(sensitive in key.lower() for sensitive in sensitive_keys):
filtered_params[key] = "***"
else:
filtered_params[key] = value
return filtered_params
def _extract_route_name(self, request: Request) -> str:
"""从请求路径中提取路由名称"""
path = request.url.path.strip('/')
if not path:
return "root"
# 路径解析:/api/hello -> hello, /api/user/profile -> user
path_parts = path.split('/')
# 如果有api前缀跳过它
if path_parts and path_parts[0] == 'api':
path_parts = path_parts[1:]
# 取第一个主要部分作为路由名
if path_parts:
route_name = path_parts[0]
# 清理路由名,只保留字母数字和下划线
import re
route_name = re.sub(r'[^a-zA-Z0-9_]', '', route_name)
return route_name if route_name else "unknown"
return "unknown"
async def _log_request_advanced(self, request: Request, route_name: str, request_id: str, client_ip: str, user_agent: str, body_bytes: Optional[bytes] = None):
"""使用高级日志系统记录请求信息"""
route_logger = get_route_logger(route_name)
# 获取查询参数
query_params = self._filter_sensitive_params(dict(request.query_params))
# 获取请求体如果是POST/PUT/PATCH
request_body = None
if request.method in ["POST", "PUT", "PATCH"] and body_bytes:
try:
request_body = self._parse_request_body(body_bytes, request.headers.get("content-type", ""), request_id)
except Exception:
request_body = {"parse_error": "Failed to parse request body"}
route_logger.log_info(
f"收到请求: {request.method} {request.url.path}",
request_id=request_id,
method=request.method,
path=request.url.path,
query_params=query_params,
client_ip=client_ip,
user_agent=user_agent,
content_type=request.headers.get("content-type"),
content_length=request.headers.get("content-length"),
request_body=request_body
)
async def _log_response_advanced(self, request: Request, response: Response, response_data: Optional[dict], route_name: str,
request_id: str, process_time: float, client_ip: str, body_bytes: Optional[bytes] = None):
"""使用高级日志系统记录完整的请求和响应信息"""
route_logger = get_route_logger(route_name)
# 获取请求体
request_body = None
if request.method in ["POST", "PUT", "PATCH"] and body_bytes:
try:
request_body = self._parse_request_body(body_bytes, request.headers.get("content-type", ""), request_id)
except Exception:
request_body = {"parse_error": "Failed to parse request body"}
# 获取查询参数
query_params = self._filter_sensitive_params(dict(request.query_params))
route_logger.log_request(
method=request.method,
path=request.url.path,
status_code=response.status_code,
process_time=process_time,
client_ip=client_ip,
request_id=request_id,
user_agent=request.headers.get("user-agent", "Unknown"),
query_params=query_params,
request_body=request_body,
response_body=response_data,
content_type=request.headers.get("content-type"),
content_length=request.headers.get("content-length")
)
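The body-replay trick above (caching request.body() and handing a fresh receive() callable to a new Request) is what lets downstream handlers re-read an already consumed body. A stripped-down, self-contained sketch of the same pattern, outside this project's middleware stack (assumes FastAPI/Starlette and httpx are installed):

# Minimal illustration of replaying a cached request body.
from fastapi import FastAPI, Request
from fastapi.testclient import TestClient
from starlette.middleware.base import BaseHTTPMiddleware

class BodyPeek(BaseHTTPMiddleware):
    async def dispatch(self, request, call_next):
        body = await request.body()             # consumes the original stream

        async def receive():                    # replays the cached bytes for downstream readers
            return {"type": "http.request", "body": body, "more_body": False}

        return await call_next(Request(request.scope, receive))

app = FastAPI()
app.add_middleware(BodyPeek)

@app.post("/echo")
async def echo(request: Request):
    return {"echo": await request.json()}       # works because the body was replayed

with TestClient(app) as client:
    assert client.post("/echo", json={"x": 1}).json() == {"echo": {"x": 1}}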

View File

@@ -0,0 +1,65 @@
from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.types import ASGIApp
import time
from typing import Callable
from ..utils import get_logger
logger = get_logger(__name__)
class PerformanceMiddleware(BaseHTTPMiddleware):
"""性能监控中间件"""
# 类变量,用于存储实例引用
_instance = None
def __init__(self, app: ASGIApp):
super().__init__(app)
self.logger = get_logger("performance")
self.total_requests = 0
self.requests_per_status = {}
self.total_response_time = 0
# 保存实例引用
PerformanceMiddleware._instance = self
async def dispatch(self, request: Request, call_next: Callable) -> Response:
"""处理请求并添加性能头"""
start_time = time.time()
try:
response = await call_next(request)
response_time = time.time() - start_time
# 添加性能头
response.headers["X-Response-Time"] = f"{response_time:.4f}"
# 更新请求计数
self.total_requests += 1
status_code = response.status_code
if status_code not in self.requests_per_status:
self.requests_per_status[status_code] = 0
self.requests_per_status[status_code] += 1
# 更新总响应时间
self.total_response_time += response_time
return response
except Exception:
# 异常不计入统计,交由后续的异常处理中间件处理
raise
def get_stats(self):
"""获取性能统计信息"""
return {
"total_requests": self.total_requests,
"requests_per_status": self.requests_per_status,
"total_response_time": self.total_response_time,
"average_response_time": self.total_response_time / self.total_requests if self.total_requests > 0 else 0
}
@classmethod
def get_instance(cls):
"""获取中间件实例"""
return cls._instance
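A hedged sketch of how the class-level singleton could be consumed, for example by a monitoring endpoint; the /stats path, router wiring and import path are assumptions, not part of this commit:

# Hypothetical stats endpoint built on PerformanceMiddleware.get_instance().
from fastapi import APIRouter
# from middleware.performance import PerformanceMiddleware   # import path is an assumption

router = APIRouter()

@router.get("/stats")
async def performance_stats():
    middleware = PerformanceMiddleware.get_instance()
    if middleware is None:                      # middleware not installed yet
        return {"status": 0, "response": {"error": "performance middleware not initialised"}}
    return {"status": 1, "response": middleware.get_stats()}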

View File

@@ -0,0 +1,63 @@
"""
统一数据模型模块
"""
# 基础模型
from .base.response import (
BaseResponse,
ErrorResponse,
DataResponse,
PaginatedResponse
)
# 响应模型
from .response import (
StandardResponse,
RequestInfo,
ResponseInfo
)
# 业务模块模型
from .modules import (
UserCreateRequest,
UserUpdateRequest,
UserResponse,
HelloRequest,
HelloResponse,
GreetResponse,
SceneRequest,
SceneResponse,
ExampleUserRequest,
ExampleUserResponse,
)
__all__ = [
# 基础响应模型
"BaseResponse",
"ErrorResponse",
"DataResponse",
"PaginatedResponse",
# 标准响应模型
"StandardResponse",
"RequestInfo",
"ResponseInfo",
# 用户相关模型
"UserCreateRequest",
"UserUpdateRequest",
"UserResponse",
# Hello API模型
"HelloRequest",
"HelloResponse",
"GreetResponse",
# 场景相关模型
"SceneRequest",
"SceneResponse",
# 示例模型
"ExampleUserRequest",
"ExampleUserResponse",
]

View File

View File

@@ -0,0 +1,37 @@
"""
基础数据模型
"""
from datetime import datetime
from typing import Any, Dict, Optional
from pydantic import BaseModel, Field
class BaseResponse(BaseModel):
"""基础响应模型"""
success: bool = True
message: str
timestamp: float = Field(default_factory=lambda: datetime.now().timestamp())
class ErrorResponse(BaseResponse):
"""错误响应模型"""
success: bool = False
message: str
code: str
timestamp: float = Field(default_factory=lambda: datetime.now().timestamp())
class DataResponse(BaseResponse):
"""数据响应模型"""
data: Any
class PaginatedResponse(BaseResponse):
"""分页响应模型"""
data: Dict[str, Any] # 包含 items 和 pagination

View File

@@ -0,0 +1,49 @@
"""
业务模块数据模型
"""
from .user import (
UserCreateRequest,
UserUpdateRequest,
UserResponse
)
from .hello import (
HelloRequest,
HelloResponse,
GreetResponse
)
from .scene import (
SceneRequest,
SceneResponse
)
from .example import (
ExampleUserRequest,
ExampleUserResponse
)
__all__ = [
# 用户相关模型
"UserCreateRequest",
"UserUpdateRequest",
"UserResponse",
# Hello API模型
"HelloRequest",
"HelloResponse",
"GreetResponse",
# 场景相关模型
"SceneRequest",
"SceneResponse",
# 示例模型
"ExampleUserRequest",
"ExampleUserResponse",
]

View File

@@ -0,0 +1,28 @@
"""
Example API 数据模型
"""
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, Field
class ExampleUserRequest(BaseModel):
"""示例用户请求模型"""
name: str = Field(..., description="用户姓名", min_length=1, max_length=50)
email: str = Field(..., description="用户邮箱")
age: Optional[int] = Field(None, ge=0, le=150, description="用户年龄")
class ExampleUserResponse(BaseModel):
"""示例用户响应模型"""
id: int
name: str
email: str
age: Optional[int]
created_at: float

View File

@@ -0,0 +1,30 @@
"""
Hello API 数据模型
"""
from typing import Optional
from pydantic import BaseModel, Field
class HelloRequest(BaseModel):
"""Hello请求模型"""
name: str = Field(..., description="你的名字", min_length=1, max_length=50)
message: Optional[str] = Field(None, description="自定义消息")
class HelloResponse(BaseModel):
"""Hello响应模型"""
greeting: str
message: str
timestamp: str
class GreetResponse(BaseModel):
"""Greet响应模型"""
message: str
visitor_count: int

View File

@@ -0,0 +1,27 @@
"""
场景注册 API 数据模型
"""
from typing import Optional, List
from pydantic import BaseModel, Field
class SceneRequest(BaseModel):
"""场景注册请求模型"""
name: str = Field(..., description="场景名称", min_length=1, max_length=100)
description: Optional[str] = Field(None, description="场景描述")
tags: List[str] = Field(default_factory=list, description="场景标签")
class SceneResponse(BaseModel):
"""场景响应模型"""
id: int
name: str
description: Optional[str]
tags: List[str]
created_at: str
updated_at: str

View File

@@ -0,0 +1,36 @@
"""
用户相关数据模型
"""
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, Field
class UserCreateRequest(BaseModel):
"""创建用户请求模型"""
name: str = Field(..., description="用户姓名", min_length=1, max_length=50)
email: str = Field(..., description="用户邮箱", pattern=r'^[^@]+@[^@]+\.[^@]+$')
age: Optional[int] = Field(None, ge=0, le=150, description="用户年龄")
class UserUpdateRequest(BaseModel):
"""更新用户请求模型"""
name: Optional[str] = Field(None, min_length=1, max_length=50, description="用户姓名")
email: Optional[str] = Field(None, pattern=r'^[^@]+@[^@]+\.[^@]+$', description="用户邮箱")
age: Optional[int] = Field(None, ge=0, le=150, description="用户年龄")
class UserResponse(BaseModel):
"""用户响应模型"""
id: int
name: str
email: str
age: Optional[int]
created_at: str
updated_at: Optional[str] = None
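These request models validate input at the framework boundary. A short hedged example of accepted and rejected input, assuming pydantic v2 semantics (consistent with the pattern= usage above):

# Illustrative validation behaviour of UserCreateRequest (values are placeholders).
from pydantic import ValidationError
# from models.modules.user import UserCreateRequest   # import path is an assumption

ok = UserCreateRequest(name="Alice", email="alice@example.com", age=30)
print(ok.model_dump())        # {'name': 'Alice', 'email': 'alice@example.com', 'age': 30}

try:
    UserCreateRequest(name="Bob", email="not-an-email")   # rejected by the email pattern
except ValidationError as exc:
    print(exc.error_count(), "validation error(s)")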

View File

@@ -0,0 +1,19 @@
"""
响应相关模型模块
"""
from .standard import (
StandardResponse,
RequestInfo,
ResponseInfo
)
__all__ = [
"StandardResponse",
"RequestInfo",
"ResponseInfo",
]

View File

@@ -0,0 +1,51 @@
from datetime import datetime
from typing import Any, Dict, Optional
from pydantic import BaseModel
class StandardResponse(BaseModel):
"""标准响应格式"""
status: int # 0表示失败1表示成功
response: Any # 返回结果的内容,由每个接口自定义
time: str # 当前时间
@classmethod
def success(cls, data: Any = None) -> "StandardResponse":
"""创建成功响应"""
return cls(
status=1,
response=data if data is not None else {},
time=datetime.now().strftime("%Y-%m-%d %H:%M:%S")
)
@classmethod
def error(cls, error_msg: str = "", error_code: int = 0) -> "StandardResponse":
"""创建错误响应"""
return cls(
status=0,
response={
"error": error_msg,
"code": error_code
},
time=datetime.now().strftime("%Y-%m-%d %H:%M:%S")
)
class RequestInfo(BaseModel):
"""请求信息模型"""
method: str
path: str
query_params: Dict[str, Any] = {}
headers: Dict[str, str] = {}
body: Any = None
class ResponseInfo(BaseModel):
"""响应信息模型"""
status_code: int
headers: Dict[str, str] = {}
body: Any = None
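For clarity, a hedged sketch of the two envelope shapes this class produces (timestamps are illustrative):

# Example envelopes produced by StandardResponse (values illustrative).
print(StandardResponse.success({"total": 2}).model_dump())
# -> {'status': 1, 'response': {'total': 2}, 'time': '2026-01-12 14:20:44'}

print(StandardResponse.error("file not found", error_code=404).model_dump())
# -> {'status': 0, 'response': {'error': 'file not found', 'code': 404}, 'time': '2026-01-12 14:20:44'}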

View File

@@ -0,0 +1 @@
# 业务服务模块

View File

@@ -0,0 +1,472 @@
"""
文件上传服务
提供文件上传、存储、管理等功能
"""
import os
import uuid
import hashlib
import aiofiles
from typing import Dict, List, Optional, BinaryIO
from datetime import datetime
from pathlib import Path
from enum import Enum
class FileUploadStatus(Enum):
"""文件上传状态"""
UPLOADING = "uploading"
COMPLETED = "completed"
FAILED = "failed"
class UploadedFile:
"""已上传文件信息"""
def __init__(
self,
file_id: str,
filename: str,
original_filename: str,
file_path: str,
file_size: int,
content_type: Optional[str] = None,
description: Optional[str] = None
):
self.file_id = file_id
self.filename = filename # 存储的文件名带ID
self.original_filename = original_filename # 原始文件名
self.file_path = file_path # 完整文件路径
self.file_size = file_size
self.content_type = content_type
self.description = description
self.status = FileUploadStatus.COMPLETED
self.uploaded_at = datetime.now().isoformat()
self.updated_at = datetime.now().isoformat()
self.download_count = 0
self.file_hash = None # MD5哈希值可选
def to_dict(self):
"""转换为字典"""
return {
"file_id": self.file_id,
"filename": self.filename,
"original_filename": self.original_filename,
"file_path": self.file_path,
"file_size": self.file_size,
"content_type": self.content_type,
"description": self.description,
"status": self.status.value,
"uploaded_at": self.uploaded_at,
"updated_at": self.updated_at,
"download_count": self.download_count,
"file_hash": self.file_hash
}
class FileUploadService:
"""文件上传服务"""
def __init__(self, upload_dir: str = "data"):
"""
初始化文件上传服务
Args:
upload_dir: 文件上传目录,相对于项目根目录
"""
# 获取项目根目录FT-Platform目录
# file_upload.py位于: X-Request/src/services/file_upload.py
# 需要向上4级到达项目根目录: d:/Code/Project/FT-Platform
current_file = Path(__file__)
project_root = current_file.parent.parent.parent.parent
upload_path = Path(upload_dir)
if upload_path.is_absolute():
self.upload_dir = upload_path
else:
# 使用项目根目录的data文件夹
self.upload_dir = project_root / upload_dir
self.upload_dir.mkdir(parents=True, exist_ok=True)
# 调试信息:打印实际的文件存储路径
print(f"[FileUploadService] 文件存储路径: {self.upload_dir}")
# 文件信息存储(内存存储,可扩展为数据库)
self.files: Dict[str, UploadedFile] = {}
# 配置
self.max_file_size = 100 * 1024 * 1024 # 100MB 默认最大文件大小
self.allowed_extensions = None # None表示允许所有扩展名
def _generate_file_id(self) -> str:
"""生成唯一文件ID"""
return str(uuid.uuid4())
def _generate_storage_filename(self, original_filename: str, file_id: str) -> str:
"""生成存储文件名"""
# 获取文件扩展名
ext = Path(original_filename).suffix
# 使用文件ID + 扩展名
return f"{file_id}{ext}"
def _calculate_file_hash(self, file_path: Path) -> str:
"""计算文件MD5哈希值"""
hash_md5 = hashlib.md5()
with open(file_path, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
async def upload_file(
self,
file_content: bytes,
original_filename: str,
content_type: Optional[str] = None,
description: Optional[str] = None
) -> UploadedFile:
"""
上传文件
Args:
file_content: 文件内容(字节)
original_filename: 原始文件名
content_type: 文件MIME类型
description: 文件描述
Returns:
UploadedFile: 上传的文件信息
Raises:
ValueError: 文件大小超限或扩展名不允许
"""
# 检查文件大小
file_size = len(file_content)
if file_size > self.max_file_size:
raise ValueError(f"文件大小超过限制 ({self.max_file_size / 1024 / 1024:.0f}MB)")
# 检查文件扩展名
if self.allowed_extensions:
ext = Path(original_filename).suffix.lower()
if ext not in self.allowed_extensions:
raise ValueError(f"不允许的文件类型: {ext}")
# 生成文件ID和存储文件名
file_id = self._generate_file_id()
storage_filename = self._generate_storage_filename(original_filename, file_id)
file_path = self.upload_dir / storage_filename
# 保存文件
async with aiofiles.open(file_path, 'wb') as f:
await f.write(file_content)
# 计算文件哈希
file_hash = self._calculate_file_hash(file_path)
# 创建文件信息对象
uploaded_file = UploadedFile(
file_id=file_id,
filename=storage_filename,
original_filename=original_filename,
file_path=str(file_path),
file_size=file_size,
content_type=content_type,
description=description
)
uploaded_file.file_hash = file_hash
# 存储文件信息
self.files[file_id] = uploaded_file
# 更新文件名映射
await self._update_filename_mapping(file_id, original_filename, storage_filename)
return uploaded_file
async def upload_file_stream(
self,
file_stream,  # 需支持异步 read()(如 fastapi.UploadFile普通同步 BinaryIO 不适用
original_filename: str,
content_type: Optional[str] = None,
description: Optional[str] = None
) -> UploadedFile:
"""
通过文件流上传文件(适用于大文件)
Args:
file_stream: 支持异步 read() 的文件流对象(例如 fastapi 的 UploadFile
original_filename: 原始文件名
content_type: 文件MIME类型
description: 文件描述
Returns:
UploadedFile: 上传的文件信息
"""
# 读取文件内容
file_content = await file_stream.read()
# 使用普通上传方法
return await self.upload_file(
file_content=file_content,
original_filename=original_filename,
content_type=content_type,
description=description
)
def get_file(self, file_id: str) -> Optional[UploadedFile]:
"""
获取文件信息
Args:
file_id: 文件ID
Returns:
UploadedFile: 文件信息不存在返回None
"""
return self.files.get(file_id)
def get_file_path(self, file_id: str) -> Optional[Path]:
"""
获取文件的完整路径
Args:
file_id: 文件ID
Returns:
Path: 文件路径对象不存在返回None
"""
file_info = self.files.get(file_id)
if not file_info:
return None
return Path(file_info.file_path)
def get_all_files(self) -> List[UploadedFile]:
"""
获取所有文件信息
Returns:
List[UploadedFile]: 文件信息列表
"""
return list(self.files.values())
def delete_file(self, file_id: str) -> bool:
"""
删除文件
Args:
file_id: 文件ID
Returns:
bool: 删除是否成功
"""
file_info = self.files.get(file_id)
if not file_info:
return False
# 删除物理文件
file_path = Path(file_info.file_path)
if file_path.exists():
try:
file_path.unlink()
except Exception:
pass # 忽略删除错误
# 删除文件信息
del self.files[file_id]
# 删除文件名映射
import asyncio
try:
loop = asyncio.get_event_loop()
if loop.is_running():
# 如果在异步上下文中使用create_task
asyncio.create_task(self._remove_filename_mapping(file_id))
else:
# 否则直接运行
loop.run_until_complete(self._remove_filename_mapping(file_id))
except Exception:
pass # 忽略错误
return True
def file_exists(self, file_id: str) -> bool:
"""
检查文件是否存在
Args:
file_id: 文件ID
Returns:
bool: 文件是否存在
"""
return file_id in self.files
async def get_file_content(self, file_id: str) -> Optional[bytes]:
"""
获取文件内容
Args:
file_id: 文件ID
Returns:
bytes: 文件内容不存在返回None
"""
file_info = self.files.get(file_id)
if not file_info:
return None
file_path = Path(file_info.file_path)
if not file_path.exists():
return None
async with aiofiles.open(file_path, 'rb') as f:
content = await f.read()
# 更新下载计数
file_info.download_count += 1
file_info.updated_at = datetime.now().isoformat()
return content
def increment_download_count(self, file_id: str):
"""
增加下载计数
Args:
file_id: 文件ID
"""
file_info = self.files.get(file_id)
if file_info:
file_info.download_count += 1
file_info.updated_at = datetime.now().isoformat()
def update_file_description(self, file_id: str, description: str) -> bool:
"""
更新文件描述
Args:
file_id: 文件ID
description: 新描述
Returns:
bool: 更新是否成功
"""
file_info = self.files.get(file_id)
if not file_info:
return False
file_info.description = description
file_info.updated_at = datetime.now().isoformat()
return True
def get_storage_statistics(self) -> Dict:
"""
获取存储统计信息
Returns:
Dict: 统计信息
"""
total_files = len(self.files)
total_size = sum(f.file_size for f in self.files.values())
total_downloads = sum(f.download_count for f in self.files.values())
return {
"total_files": total_files,
"total_size": total_size,
"total_size_mb": round(total_size / 1024 / 1024, 2),
"total_downloads": total_downloads,
"storage_dir": str(self.upload_dir)
}
async def _update_filename_mapping(self, file_id: str, original_filename: str, storage_filename: str):
"""
更新文件名映射文件
Args:
file_id: 文件ID
original_filename: 原始文件名
storage_filename: 存储文件名
"""
import json
mapping_file = self.upload_dir / "filename_mapping.json"
# 读取现有映射
mappings = {}
if mapping_file.exists():
try:
async with aiofiles.open(mapping_file, 'r', encoding='utf-8') as f:
content = await f.read()
data = json.loads(content)
mappings = data.get("mappings", {})
except Exception:
mappings = {}
# 添加新映射
mappings[file_id] = {
"original_filename": original_filename,
"storage_filename": storage_filename,
"uploaded_at": datetime.now().isoformat()
}
# 写入映射文件
data = {"mappings": mappings}
async with aiofiles.open(mapping_file, 'w', encoding='utf-8') as f:
await f.write(json.dumps(data, ensure_ascii=False, indent=2))
async def _remove_filename_mapping(self, file_id: str):
"""
从映射文件中删除文件映射
Args:
file_id: 文件ID
"""
import json
mapping_file = self.upload_dir / "filename_mapping.json"
if not mapping_file.exists():
return
try:
# 读取现有映射
async with aiofiles.open(mapping_file, 'r', encoding='utf-8') as f:
content = await f.read()
data = json.loads(content)
mappings = data.get("mappings", {})
# 删除指定映射
if file_id in mappings:
del mappings[file_id]
# 写入映射文件
data = {"mappings": mappings}
async with aiofiles.open(mapping_file, 'w', encoding='utf-8') as f:
await f.write(json.dumps(data, ensure_ascii=False, indent=2))
except Exception:
pass # 忽略错误
def get_filename_mapping(self, file_id: str) -> Optional[Dict]:
"""
获取文件名映射信息
Args:
file_id: 文件ID
Returns:
Dict: 映射信息不存在返回None
"""
import json
mapping_file = self.upload_dir / "filename_mapping.json"
if not mapping_file.exists():
return None
try:
with open(mapping_file, 'r', encoding='utf-8') as f:
data = json.load(f)
mappings = data.get("mappings", {})
return mappings.get(file_id)
except Exception:
return None
# 全局文件上传服务实例
file_upload_service = FileUploadService()
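A hedged sketch of driving the service from an async context such as a FastAPI endpoint; the route path and import are assumptions, and the call follows the upload_file signature defined above:

# Hypothetical upload endpoint wired to the global service instance.
from fastapi import APIRouter, UploadFile
# from services.file_upload import file_upload_service   # import path is an assumption

router = APIRouter()

@router.post("/upload")
async def upload(file: UploadFile):
    content = await file.read()
    info = await file_upload_service.upload_file(
        file_content=content,
        original_filename=file.filename or "unnamed",
        content_type=file.content_type,
        description="uploaded via /upload (example)",
    )
    return {"status": 1, "response": info.to_dict()}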

View File

@@ -0,0 +1,169 @@
"""
微调任务管理服务
"""
import asyncio
from typing import Dict, List, Optional
from datetime import datetime
from enum import Enum
class FineTuneStatus(Enum):
"""微调任务状态"""
PENDING = "pending"
RUNNING = "running"
COMPLETED = "completed"
FAILED = "failed"
CANCELED = "canceled"
class FineTuneTask:
"""微调任务"""
def __init__(self, task_id: str, name: str, description: str):
self.task_id = task_id
self.name = name
self.description = description
self.status = FineTuneStatus.PENDING
self.progress = 0.0
self.created_at = datetime.now().isoformat()
self.updated_at = datetime.now().isoformat()
self.logs: List[str] = []
def update_status(self, status: FineTuneStatus):
"""更新任务状态"""
self.status = status
self.updated_at = datetime.now().isoformat()
def update_progress(self, progress: float, log: Optional[str] = None):
"""更新任务进度"""
self.progress = min(max(progress, 0.0), 100.0)
self.updated_at = datetime.now().isoformat()
if log:
self.logs.append(f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] {log}")
def to_dict(self):
"""转换为字典"""
return {
"task_id": self.task_id,
"name": self.name,
"description": self.description,
"status": self.status.value,
"progress": self.progress,
"created_at": self.created_at,
"updated_at": self.updated_at,
"logs": self.logs
}
class FineTuneManager:
"""微调任务管理器"""
def __init__(self):
self.tasks: Dict[str, FineTuneTask] = {}
self.listeners: Dict[str, List[asyncio.Queue]] = {}
def create_task(self, task_id: str, name: str, description: str) -> FineTuneTask:
"""创建新任务"""
task = FineTuneTask(task_id, name, description)
self.tasks[task_id] = task
self.listeners[task_id] = []
return task
def get_task(self, task_id: str) -> Optional[FineTuneTask]:
"""获取任务"""
return self.tasks.get(task_id)
def get_all_tasks(self) -> List[FineTuneTask]:
"""获取所有任务"""
return list(self.tasks.values())
def update_task_status(self, task_id: str, status: FineTuneStatus):
"""更新任务状态"""
task = self.tasks.get(task_id)
if task:
task.update_status(status)
self._notify_listeners(task_id)
def update_task_progress(self, task_id: str, progress: float, log: Optional[str] = None):
"""更新任务进度"""
task = self.tasks.get(task_id)
if task:
task.update_progress(progress, log)
self._notify_listeners(task_id)
def delete_task(self, task_id: str):
"""删除任务"""
if task_id in self.tasks:
del self.tasks[task_id]
if task_id in self.listeners:
# 通知所有监听器任务已删除
for queue in self.listeners[task_id]:
queue.put_nowait(None) # None表示任务已删除
del self.listeners[task_id]
async def add_listener(self, task_id: str) -> asyncio.Queue:
"""添加任务监听器"""
queue = asyncio.Queue()
if task_id in self.listeners:
self.listeners[task_id].append(queue)
else:
self.listeners[task_id] = [queue]
return queue
def remove_listener(self, task_id: str, queue: asyncio.Queue):
"""移除任务监听器"""
if task_id in self.listeners and queue in self.listeners[task_id]:
self.listeners[task_id].remove(queue)
# 如果没有监听器了,清理
if not self.listeners[task_id]:
del self.listeners[task_id]
def _notify_listeners(self, task_id: str):
"""通知所有监听器"""
if task_id in self.listeners:
task = self.tasks.get(task_id)
if task:
for queue in self.listeners[task_id]:
queue.put_nowait(task.to_dict())
def task_exists(self, task_id: str) -> bool:
"""检查任务是否存在"""
return task_id in self.tasks
# 全局微调任务管理器实例
fine_tune_manager = FineTuneManager()
async def simulate_fine_tune(task_id: str):
"""模拟微调任务执行"""
manager = fine_tune_manager
task = manager.get_task(task_id)
if not task:
return
# 更新状态为运行中
manager.update_task_status(task_id, FineTuneStatus.RUNNING)
# 模拟训练过程
steps = [
("初始化模型", 10),
("加载数据集", 20),
("训练第1轮", 35),
("训练第2轮", 50),
("训练第3轮", 65),
("训练第4轮", 80),
("模型评估", 90),
("保存模型", 100)
]
for step_name, step_progress in steps:
# 模拟步骤执行
await asyncio.sleep(2)
manager.update_task_progress(task_id, step_progress, f"{step_name}完成")
# 更新状态为完成
manager.update_task_status(task_id, FineTuneStatus.COMPLETED)
manager.update_task_progress(task_id, 100.0, "微调任务已完成")
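The listener queues are designed for push-style consumers (e.g. SSE or WebSocket endpoints). A hedged sketch of one consumer loop running alongside simulate_fine_tune; the task id and output format are placeholders:

# Hypothetical consumer of task progress events.
import asyncio
# from services.fine_tune import fine_tune_manager, simulate_fine_tune   # import path is an assumption

async def watch_task(task_id: str):
    queue = await fine_tune_manager.add_listener(task_id)
    try:
        while True:
            event = await queue.get()
            if event is None:                          # task was deleted
                break
            print(f"{event['progress']:5.1f}%  {event['status']}")
            if event["status"] in ("completed", "failed", "canceled"):
                break
    finally:
        fine_tune_manager.remove_listener(task_id, queue)

async def main():
    fine_tune_manager.create_task("demo", "demo task", "example run")
    await asyncio.gather(simulate_fine_tune("demo"), watch_task("demo"))

# asyncio.run(main())   # takes roughly 16 s because every simulated step sleeps for 2 s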

View File

@@ -0,0 +1,81 @@
# 统一日志系统 - 推荐使用
from .unified_logger import (
get_logger as get_unified_logger,
get_route_logger,
configure_logging,
cleanup_logs,
get_log_stats,
unified_logger_manager,
UnifiedLogger,
)
# 向后兼容 - 保持原有接口
from .logger import (
get_logger,
log,
log_debug,
log_info,
log_warning,
log_error,
log_critical,
logger_manager,
)
from .decorators import (
log_function_call,
log_performance,
retry_on_failure,
log_it,
time_it,
)
from .exceptions import (
BaseAPIException,
ValidationException,
AuthenticationException,
AuthorizationException,
NotFoundException,
ConflictException,
RateLimitException,
ExternalServiceException,
BusinessException,
)
__all__ = [
# 统一日志系统 (推荐使用)
"get_unified_logger",
"get_route_logger",
"configure_logging",
"cleanup_logs",
"get_log_stats",
"unified_logger_manager",
"UnifiedLogger",
# 向后兼容接口
"get_logger",
"log",
"log_debug",
"log_info",
"log_warning",
"log_error",
"log_critical",
"logger_manager",
# Decorators
"log_function_call",
"log_performance",
"retry_on_failure",
"log_it",
"time_it",
# Exceptions
"BaseAPIException",
"ValidationException",
"AuthenticationException",
"AuthorizationException",
"NotFoundException",
"ConflictException",
"RateLimitException",
"ExternalServiceException",
"BusinessException",
]

View File

@@ -0,0 +1,435 @@
import os
import shutil
import logging
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, Optional, Any
from concurrent.futures import ThreadPoolExecutor
import threading
import json
from .logger import get_logger
class AdvancedLoggerManager:
"""高级日志管理器 - 支持按日期和路由分类"""
def __init__(self,
logs_dir: str = "logs",
max_log_days: int = 30,
enable_cleanup: bool = True):
self.logs_dir = Path(logs_dir)
self.max_log_days = max_log_days
self.enable_cleanup = enable_cleanup
self._file_handlers: Dict[str, logging.FileHandler] = {}
self._lock = threading.Lock()
self._cleanup_executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix="log-cleanup")
# 确保logs目录存在
self.logs_dir.mkdir(exist_ok=True)
# 获取内部logger
self.logger = get_logger(__name__)
def get_date_log_dir(self, date: Optional[datetime] = None) -> Path:
"""获取指定日期的日志目录"""
if date is None:
date = datetime.now()
date_str = date.strftime("%Y-%m-%d")
return self.logs_dir / date_str
def get_route_log_path(self, route_name: str, date: Optional[datetime] = None) -> Path:
"""获取指定路由和日期的日志文件路径"""
date_dir = self.get_date_log_dir(date)
return date_dir / f"{route_name}.log"
def ensure_date_dir(self, date: Optional[datetime] = None) -> Path:
"""确保日期目录存在"""
date_dir = self.get_date_log_dir(date)
date_dir.mkdir(parents=True, exist_ok=True)
return date_dir
def get_file_handler(self, route_name: str, date: Optional[datetime] = None) -> logging.FileHandler:
"""获取或创建指定路由的文件处理器"""
date_str = date.strftime("%Y-%m-%d") if date else datetime.now().strftime("%Y-%m-%d")
handler_key = f"{date_str}_{route_name}"
with self._lock:
# 检查是否已有缓存的处理句柄
if handler_key in self._file_handlers:
handler = self._file_handlers[handler_key]
# 检查文件是否仍然有效(防止日志轮转)
if handler.baseFilename == str(self.get_route_log_path(route_name, date)):
return handler
else:
# 文件路径已变化,清理旧的处理句柄
handler.close()
del self._file_handlers[handler_key]
# 创建新的日期目录
try:
date_dir = self.ensure_date_dir(date)
if not date_dir.exists():
self.logger.error(f"Failed to create date directory: {date_dir}")
except Exception as e:
self.logger.error(f"Error creating date directory: {str(e)}")
raise
# 创建新的文件处理器
try:
log_file_path = self.get_route_log_path(route_name, date)
# 确保父目录存在
log_file_path.parent.mkdir(parents=True, exist_ok=True)
handler = logging.FileHandler(str(log_file_path), encoding='utf-8', mode='a')
# 设置JSON格式化器
handler.setFormatter(JsonLogFormatter())
# 缓存处理句柄
self._file_handlers[handler_key] = handler
return handler
except Exception as e:
self.logger.error(f"Error creating file handler for {route_name}: {str(e)}")
raise
def log_request(self,
route_name: str,
method: str,
path: str,
status_code: int,
process_time: float,
client_ip: str,
request_id: str,
user_agent: str = "Unknown",
query_params: Optional[Dict] = None,
request_body: Optional[Dict] = None,
response_body: Optional[Dict] = None,
error: Optional[str] = None,
content_type: Optional[str] = None,
content_length: Optional[str] = None):
"""记录请求日志 - 成功写入*_success.log失败写入*_error.log"""
# 确定日志级别
if error or status_code >= 400:
log_level = "ERROR"
else:
log_level = "INFO"
# 构建请求信息
request_info = {
"method": method,
"path": path,
"query_params": query_params or {},
"client_ip": client_ip,
"request_id": request_id,
"user_agent": user_agent
}
# 添加请求头信息
if content_type:
request_info["content_type"] = content_type
if content_length:
request_info["content_length"] = content_length
if request_body:
request_info["body"] = request_body
# 构建响应信息
response_info = {
"status_code": status_code,
"process_time_ms": round(process_time * 1000, 2)
}
# 如果有响应体,添加到响应信息中
if response_body:
response_info["body"] = response_body
# 如果有错误,添加到响应信息中
if error:
response_info["error"] = error
# 构建日志数据
log_data = {
"timestamp": datetime.now().isoformat(),
"level": log_level,
"route": route_name,
"request": request_info,
"response": response_info
}
# 根据状态码决定写入成功还是失败日志文件
if status_code == 200 and not error:
# 只有200状态码才算是成功
success_log_name = f"{route_name}_success"
self._write_to_route_log(success_log_name, log_data)
else:
# 其他所有状态码包括3xx重定向都算作失败
error_log_name = f"{route_name}_error"
self._write_to_route_log(error_log_name, log_data)
def _write_to_route_log(self, route_name: str, log_data: Dict[str, Any]):
"""写入日志到指定路由的文件"""
try:
# 获取日志文件路径
log_file_path = self.get_route_log_path(route_name)
# 确保目录存在
log_file_path.parent.mkdir(parents=True, exist_ok=True)
# 格式化为易读的JSON
formatted_log = json.dumps(log_data, ensure_ascii=False, indent=2, default=str)
# 构建完整的日志条目(包含分割线)
log_entry = f"{'='*80}\n{formatted_log}\n"
# 直接写入文件(更可靠的方法)
with open(str(log_file_path), 'a', encoding='utf-8') as f:
f.write(log_entry)
f.flush()
except Exception as e:
# 如果写入失败使用标准logger记录错误并打印详细错误信息
import traceback
error_details = traceback.format_exc()
try:
self.logger.error(
f"Failed to write log to {route_name}",
error=str(e),
error_type=type(e).__name__,
traceback=error_details,
log_data=log_data
)
except Exception:
# 如果连标准logger都失败了至少打印到控制台
print(f"CRITICAL: Failed to write log to {route_name}: {str(e)}")
print(error_details)
def get_route_logger(self, route_name: str):
"""获取指定路由的logger实例"""
return RouteLogger(self, route_name)
def cleanup_old_logs(self, days: Optional[int] = None) -> Dict[str, Any]:
"""清理过期的日志文件"""
if not self.enable_cleanup:
return {"status": "disabled", "message": "Log cleanup is disabled"}
cleanup_days = days or self.max_log_days
cutoff_date = datetime.now() - timedelta(days=cleanup_days)
cleanup_stats = {
"deleted_dirs": 0,
"deleted_files": 0,
"freed_bytes": 0,
"errors": []
}
try:
# 遍历logs目录下的所有日期文件夹
for date_dir in self.logs_dir.iterdir():
if not date_dir.is_dir():
continue
# 尝试解析日期
try:
dir_date = datetime.strptime(date_dir.name, "%Y-%m-%d")
except ValueError:
# 不是日期格式的目录,跳过
continue
# 如果目录日期早于截止日期,删除整个目录
if dir_date < cutoff_date:
try:
dir_size = self._get_dir_size(date_dir)
shutil.rmtree(date_dir)
cleanup_stats["deleted_dirs"] += 1
cleanup_stats["freed_bytes"] += dir_size
self.logger.info(f"Deleted old log directory: {date_dir}")
except Exception as e:
error_msg = f"Failed to delete directory {date_dir}: {str(e)}"
cleanup_stats["errors"].append(error_msg)
self.logger.error(error_msg)
except Exception as e:
error_msg = f"Cleanup failed: {str(e)}"
cleanup_stats["errors"].append(error_msg)
self.logger.error(error_msg)
# 清理内存中的旧处理句柄
self._cleanup_old_handlers(cutoff_date)
cleanup_stats["freed_mb"] = round(cleanup_stats["freed_bytes"] / (1024 * 1024), 2)
return cleanup_stats
def _get_dir_size(self, directory: Path) -> int:
"""获取目录大小"""
total_size = 0
try:
for file_path in directory.rglob("*"):
if file_path.is_file():
total_size += file_path.stat().st_size
except Exception:
pass
return total_size
def _cleanup_old_handlers(self, cutoff_date: datetime):
"""清理内存中的旧处理句柄"""
with self._lock:
keys_to_remove = []
for key, handler in self._file_handlers.items():
try:
date_str = key.split("_")[0]
handler_date = datetime.strptime(date_str, "%Y-%m-%d")
if handler_date < cutoff_date:
handler.close()
keys_to_remove.append(key)
except Exception:
# 如果解析失败,也清理掉
keys_to_remove.append(key)
for key in keys_to_remove:
del self._file_handlers[key]
def schedule_cleanup(self):
"""计划清理任务(每天执行一次)"""
if not self.enable_cleanup:
return
def cleanup_task():
try:
result = self.cleanup_old_logs()
if result.get("deleted_dirs", 0) > 0:
self.logger.info(f"Log cleanup completed: {result}")
except Exception as e:
self.logger.error(f"Scheduled cleanup failed: {str(e)}")
# 提交清理任务到线程池
self._cleanup_executor.submit(cleanup_task)
def get_log_stats(self) -> Dict[str, Any]:
"""获取日志统计信息"""
stats = {
"total_directories": 0,
"total_files": 0,
"total_size_bytes": 0,
"active_handlers": len(self._file_handlers),
"routes": set()
}
try:
for date_dir in self.logs_dir.iterdir():
if not date_dir.is_dir():
continue
try:
datetime.strptime(date_dir.name, "%Y-%m-%d")
stats["total_directories"] += 1
for log_file in date_dir.glob("*.log"):
stats["total_files"] += 1
stats["total_size_bytes"] += log_file.stat().st_size
stats["routes"].add(log_file.stem)
except ValueError:
continue
except Exception as e:
self.logger.error(f"Failed to get log stats: {str(e)}")
stats["total_size_mb"] = round(stats["total_size_bytes"] / (1024 * 1024), 2)
stats["routes"] = list(sorted(stats["routes"]))
return stats
def __del__(self):
"""清理资源"""
try:
# 关闭所有文件处理器
for handler in self._file_handlers.values():
handler.close()
# 关闭线程池
if hasattr(self, '_cleanup_executor'):
self._cleanup_executor.shutdown(wait=False)
except Exception:
pass
class JsonLogFormatter(logging.Formatter):
"""JSON格式的日志格式化器"""
def format(self, record):
# 如果record.msg是字典直接使用
if isinstance(record.msg, dict):
log_data = record.msg.copy()
else:
log_data = {
"timestamp": datetime.now().isoformat(),
"level": record.levelname,
"message": record.getMessage(),
"module": record.module,
"function": record.funcName,
"line": record.lineno
}
# 添加异常信息
if record.exc_info:
log_data["exception"] = self.formatException(record.exc_info)
return json.dumps(log_data, ensure_ascii=False, default=str)
class RouteLogger:
"""路由特定的日志器"""
def __init__(self, manager: AdvancedLoggerManager, route_name: str):
self.manager = manager
self.route_name = route_name
def log_request(self, **kwargs):
"""记录请求日志"""
self.manager.log_request(route_name=self.route_name, **kwargs)
def log_info(self, message: str, **kwargs):
"""记录信息日志"""
log_data = {
"timestamp": datetime.now().isoformat(),
"level": "INFO",
"route": self.route_name,
"message": message,
**kwargs
}
self.manager._write_to_route_log(self.route_name, log_data)
def log_warning(self, message: str, **kwargs):
"""记录警告日志"""
log_data = {
"timestamp": datetime.now().isoformat(),
"level": "WARNING",
"route": self.route_name,
"message": message,
**kwargs
}
self.manager._write_to_route_log(self.route_name, log_data)
def log_error(self, message: str, **kwargs):
"""记录错误日志"""
log_data = {
"timestamp": datetime.now().isoformat(),
"level": "ERROR",
"route": self.route_name,
"message": message,
**kwargs
}
self.manager._write_to_route_log(self.route_name, log_data)
# 同时写入全局错误日志
self.manager._write_to_route_log("errors", log_data)
# 全局高级日志管理器实例
advanced_logger_manager = AdvancedLoggerManager()
def get_route_logger(route_name: str) -> RouteLogger:
"""获取指定路由的日志器"""
return advanced_logger_manager.get_route_logger(route_name)
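In practice each route name maps to logs/&lt;YYYY-MM-DD&gt;/&lt;route&gt;*.log under the configured logs_dir. A hedged usage sketch (dates and paths are illustrative):

# Hypothetical direct usage of the route logger.
# from utils.advanced_logger import get_route_logger   # import path is an assumption

dataset_logger = get_route_logger("dataset")
dataset_logger.log_info("dataset refreshed", file_count=3)        # -> logs/2026-01-12/dataset.log
dataset_logger.log_error("mapping file unreadable", path="data/filename_mapping.json")
# log_error entries are mirrored into logs/2026-01-12/errors.log as well

dataset_logger.log_request(                                        # -> dataset_success.log or dataset_error.log
    method="GET", path="/api/dataset/files", status_code=200,
    process_time=0.012, client_ip="127.0.0.1", request_id="demo-1",
)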

View File

@@ -0,0 +1,321 @@
import functools
import time
import asyncio
import inspect
from typing import Any, Callable, Dict, Optional, Union
from .logger import get_logger
def log_function_call(
log_level: str = "info",
log_args: bool = True,
log_result: bool = True,
log_execution_time: bool = True,
include_request_id: bool = True,
):
"""函数调用日志装饰器
Args:
log_level: 日志级别
log_args: 是否记录函数参数
log_result: 是否记录返回值
log_execution_time: 是否记录执行时间
include_request_id: 是否包含request_id当前实现尚未使用该参数预留
"""
def decorator(func: Callable) -> Callable:
logger = get_logger(func.__module__)
if inspect.iscoroutinefunction(func):
@functools.wraps(func)
async def async_wrapper(*args, **kwargs):
func_name = f"{func.__module__}.{func.__qualname__}"
start_time = time.time()
# 构建日志上下文
log_context = {"function": func_name}
# 添加参数到日志
if log_args:
# 过滤敏感参数
safe_kwargs = _filter_sensitive_data(kwargs)
log_context.update({
"args_count": len(args),
"kwargs": safe_kwargs
})
# 记录函数开始执行
getattr(logger, log_level)(
f"开始执行函数: {func_name}",
**log_context
)
try:
# 执行函数
result = await func(*args, **kwargs)
# 记录执行时间
if log_execution_time:
execution_time = time.time() - start_time
log_context["execution_time"] = f"{execution_time:.4f}s"
# 记录返回值
if log_result:
log_context["result"] = _truncate_result(result)
# 记录函数执行成功
getattr(logger, log_level)(
f"函数执行成功: {func_name}",
**log_context
)
return result
except Exception as e:
# 记录执行时间
if log_execution_time:
execution_time = time.time() - start_time
log_context["execution_time"] = f"{execution_time:.4f}s"
# 记录异常
logger.error(
f"函数执行失败: {func_name}",
error=str(e),
error_type=type(e).__name__,
**log_context
)
raise
return async_wrapper
else:
@functools.wraps(func)
def sync_wrapper(*args, **kwargs):
func_name = f"{func.__module__}.{func.__qualname__}"
start_time = time.time()
# 构建日志上下文
log_context = {"function": func_name}
# 添加参数到日志
if log_args:
safe_kwargs = _filter_sensitive_data(kwargs)
log_context.update({
"args_count": len(args),
"kwargs": safe_kwargs
})
# 记录函数开始执行
getattr(logger, log_level)(
f"开始执行函数: {func_name}",
**log_context
)
try:
# 执行函数
result = func(*args, **kwargs)
# 记录执行时间
if log_execution_time:
execution_time = time.time() - start_time
log_context["execution_time"] = f"{execution_time:.4f}s"
# 记录返回值
if log_result:
log_context["result"] = _truncate_result(result)
# 记录函数执行成功
getattr(logger, log_level)(
f"函数执行成功: {func_name}",
**log_context
)
return result
except Exception as e:
# 记录执行时间
if log_execution_time:
execution_time = time.time() - start_time
log_context["execution_time"] = f"{execution_time:.4f}s"
# 记录异常
logger.error(
f"函数执行失败: {func_name}",
error=str(e),
error_type=type(e).__name__,
**log_context
)
raise
return sync_wrapper
return decorator
def log_performance(threshold_ms: float = 1000):
"""性能监控装饰器,当执行时间超过阈值时记录警告
Args:
threshold_ms: 时间阈值(毫秒)
"""
def decorator(func: Callable) -> Callable:
logger = get_logger(func.__module__)
if inspect.iscoroutinefunction(func):
@functools.wraps(func)
async def async_wrapper(*args, **kwargs):
start_time = time.time()
result = await func(*args, **kwargs)
execution_time = (time.time() - start_time) * 1000 # 转换为毫秒
if execution_time > threshold_ms:
logger.warning(
f"函数执行时间超过阈值: {func.__qualname__}",
function=func.__qualname__,
execution_time_ms=f"{execution_time:.2f}",
threshold_ms=threshold_ms
)
return result
return async_wrapper
else:
@functools.wraps(func)
def sync_wrapper(*args, **kwargs):
start_time = time.time()
result = func(*args, **kwargs)
execution_time = (time.time() - start_time) * 1000 # 转换为毫秒
if execution_time > threshold_ms:
logger.warning(
f"函数执行时间超过阈值: {func.__qualname__}",
function=func.__qualname__,
execution_time_ms=f"{execution_time:.2f}",
threshold_ms=threshold_ms
)
return result
return sync_wrapper
return decorator
def retry_on_failure(
max_retries: int = 3,
delay: float = 1.0,
backoff_factor: float = 2.0,
exceptions: tuple = (Exception,),
):
"""重试装饰器
Args:
max_retries: 最大重试次数
delay: 初始延迟时间
backoff_factor: 延迟递增因子
exceptions: 需要重试的异常类型
"""
def decorator(func: Callable) -> Callable:
logger = get_logger(func.__module__)
if inspect.iscoroutinefunction(func):
@functools.wraps(func)
async def async_wrapper(*args, **kwargs):
last_exception = None
current_delay = delay
for attempt in range(max_retries + 1):
try:
return await func(*args, **kwargs)
except exceptions as e:
last_exception = e
if attempt < max_retries:
logger.warning(
f"函数执行失败,{current_delay:.1f}秒后重试: {func.__qualname__}",
function=func.__qualname__,
attempt=attempt + 1,
max_retries=max_retries + 1,
error=str(e),
retry_delay=current_delay
)
await asyncio.sleep(current_delay)
current_delay *= backoff_factor
else:
logger.error(
f"函数重试失败,已达到最大重试次数: {func.__qualname__}",
function=func.__qualname__,
max_retries=max_retries + 1,
final_error=str(e)
)
raise last_exception
return async_wrapper
else:
@functools.wraps(func)
def sync_wrapper(*args, **kwargs):
import time as sync_time
last_exception = None
current_delay = delay
for attempt in range(max_retries + 1):
try:
return func(*args, **kwargs)
except exceptions as e:
last_exception = e
if attempt < max_retries:
logger.warning(
f"函数执行失败,{current_delay:.1f}秒后重试: {func.__qualname__}",
function=func.__qualname__,
attempt=attempt + 1,
max_retries=max_retries + 1,
error=str(e),
retry_delay=current_delay
)
sync_time.sleep(current_delay)
current_delay *= backoff_factor
else:
logger.error(
f"函数重试失败,已达到最大重试次数: {func.__qualname__}",
function=func.__qualname__,
max_retries=max_retries + 1,
final_error=str(e)
)
raise last_exception
return sync_wrapper
return decorator
def _filter_sensitive_data(data: Dict[str, Any]) -> Dict[str, Any]:
"""过滤敏感数据"""
sensitive_keys = {'password', 'token', 'secret', 'key', 'auth'}
filtered_data = {}
for key, value in data.items():
if any(sensitive in key.lower() for sensitive in sensitive_keys):
filtered_data[key] = "***"
else:
filtered_data[key] = value
return filtered_data
def _truncate_result(result: Any, max_length: int = 200) -> str:
"""截断结果字符串"""
result_str = str(result)
if len(result_str) > max_length:
return result_str[:max_length] + "..."
return result_str
# 便捷装饰器
def log_it(func: Callable) -> Callable:
"""简单的函数日志装饰器,使用默认配置"""
return log_function_call()(func)
def time_it(func: Callable) -> Callable:
"""简单的性能监控装饰器"""
return log_performance()(func)
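A short hedged example combining the three decorator families defined above; the function body and the numbers are placeholders:

# Hypothetical usage of the decorators defined above.
import asyncio
# from utils.decorators import log_it, log_performance, retry_on_failure   # import path is an assumption

@log_it
@log_performance(threshold_ms=100)
@retry_on_failure(max_retries=2, delay=0.5, exceptions=(ConnectionError,))
async def fetch_remote(url: str) -> dict:
    # placeholder for an outbound call; a real client would use httpx or aiohttp here
    await asyncio.sleep(0.2)          # slow enough to trigger the 100 ms performance warning
    return {"url": url, "ok": True}

# asyncio.run(fetch_remote("https://example.com/api"))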

View File

@@ -0,0 +1,345 @@
"""
API文档生成器 - 离线生成Swagger文档
"""
import hashlib
import json
import os
from pathlib import Path
from typing import Optional
from ..utils import get_logger
logger = get_logger(__name__)
# 文档缓存目录
STATIC_DIR = Path("static")
CACHE_FILE = STATIC_DIR / ".openapi_cache"
DOC_FILE = STATIC_DIR / "doc.html"
def get_openapi_hash(openapi_spec: dict) -> str:
"""计算OpenAPI规范的哈希值"""
spec_str = json.dumps(openapi_spec, sort_keys=True, ensure_ascii=False)
return hashlib.md5(spec_str.encode()).hexdigest()
def get_cached_hash() -> Optional[str]:
"""获取缓存的哈希值"""
try:
if CACHE_FILE.exists():
return CACHE_FILE.read_text().strip()
except Exception:
pass
return None
def save_cache_hash(hash_value: str):
"""保存哈希值到缓存"""
try:
CACHE_FILE.write_text(hash_value)
except Exception as e:
logger.warning(f"无法保存文档缓存: {e}")
def generate_doc_html(openapi_spec: dict) -> str:
"""生成文档HTML内容"""
# 将OpenAPI规范内嵌到HTML中
spec_json = json.dumps(openapi_spec, ensure_ascii=False, indent=2)
api_version = openapi_spec.get("info", {}).get("version", "1.0.0")
api_title = openapi_spec.get("info", {}).get("title", "API文档")
html_content = f'''<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{api_title} - 接口文档</title>
<!-- 本地Swagger UI资源 -->
<link rel="stylesheet" href="/vendor/swagger-ui.css">
<script src="/vendor/swagger-ui-bundle.js"></script>
<style>
* {{
margin: 0;
padding: 0;
box-sizing: border-box;
}}
body {{
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
background: #f8fafc;
}}
.nav {{
background: white;
box-shadow: 0 1px 3px rgba(0,0,0,0.1);
height: 64px;
display: flex;
align-items: center;
padding: 0 24px;
position: sticky;
top: 0;
z-index: 100;
}}
.nav-brand {{
display: flex;
align-items: center;
font-size: 20px;
font-weight: 600;
color: #1f2937;
}}
.nav-brand i {{
color: #3b82f6;
margin-right: 8px;
}}
.nav-links {{
display: flex;
margin-left: 24px;
gap: 8px;
}}
.nav-link {{
padding: 8px 12px;
border-radius: 6px;
text-decoration: none;
font-size: 14px;
color: #6b7280;
transition: all 0.2s;
}}
.nav-link:hover {{
background: rgba(59, 130, 246, 0.1);
color: #3b82f6;
}}
.nav-link.active {{
background: rgba(59, 130, 246, 0.1);
color: #3b82f6;
border-bottom: 2px solid #3b82f6;
}}
.nav-right {{
margin-left: auto;
display: flex;
align-items: center;
gap: 16px;
}}
.api-version {{
font-size: 14px;
color: #6b7280;
}}
.api-version span {{
font-weight: 500;
color: #1f2937;
}}
.main {{
padding: 16px 24px;
}}
.doc-container {{
background: white;
border-radius: 8px;
box-shadow: 0 1px 3px rgba(0,0,0,0.1);
min-height: calc(100vh - 120px);
}}
/* Swagger UI 样式覆盖 */
.swagger-ui .topbar {{
display: none;
}}
.swagger-ui .info {{
margin: 20px 0;
}}
.swagger-ui .scheme-container {{
background: #f8fafc;
padding: 15px;
box-shadow: none;
}}
/* Font Awesome 图标 (内联) */
.fa {{
display: inline-block;
font-style: normal;
}}
.fa-file-text-o:before {{ content: "📄"; }}
.fa-file-text:before {{ content: "📝"; }}
.fa-book:before {{ content: "📖"; }}
.fa-home:before {{ content: "🏠"; }}
</style>
</head>
<body>
<nav class="nav">
<div class="nav-brand">
<i class="fa fa-file-text-o"></i>
X-Request 管理系统
</div>
<div class="nav-links">
<a href="/log.html" class="nav-link">
<i class="fa fa-file-text"></i> 日志管理
</a>
<a href="/doc.html" class="nav-link active">
<i class="fa fa-book"></i> 接口文档
</a>
<a href="/" class="nav-link">
<i class="fa fa-home"></i> 首页
</a>
</div>
<div class="nav-right">
<div class="api-version">
API版本: <span>{api_version}</span>
</div>
</div>
</nav>
<main class="main">
<div class="doc-container">
<div id="swagger-ui"></div>
</div>
</main>
<script>
// 内嵌的OpenAPI规范
const spec = {spec_json};
window.onload = function() {{
SwaggerUIBundle({{
spec: spec,
dom_id: '#swagger-ui',
deepLinking: true,
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIBundle.SwaggerUIStandalonePreset
],
layout: "BaseLayout",
defaultModelsExpandDepth: 1,
defaultModelExpandDepth: 1,
docExpansion: "list",
filter: true,
showExtensions: true,
showCommonExtensions: true
}});
}};
</script>
</body>
</html>'''
return html_content
def generate_loading_html() -> str:
"""生成加载中的HTML页面"""
return '''<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>生成文档中...</title>
<style>
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
display: flex;
justify-content: center;
align-items: center;
min-height: 100vh;
margin: 0;
background: #f8fafc;
}
.loading {
text-align: center;
}
.spinner {
width: 50px;
height: 50px;
border: 4px solid #e5e7eb;
border-top-color: #3b82f6;
border-radius: 50%;
animation: spin 1s linear infinite;
margin: 0 auto 20px;
}
@keyframes spin {
to { transform: rotate(360deg); }
}
h2 {
color: #1f2937;
margin-bottom: 10px;
}
p {
color: #6b7280;
}
</style>
<meta http-equiv="refresh" content="2">
</head>
<body>
<div class="loading">
<div class="spinner"></div>
<h2>正在生成API文档</h2>
<p>请稍候,页面将自动刷新...</p>
</div>
</body>
</html>'''
def should_regenerate(openapi_spec: dict) -> bool:
"""检查是否需要重新生成文档"""
# 检查文档文件是否存在
if not DOC_FILE.exists():
return True
# 检查本地资源是否存在
vendor_dir = STATIC_DIR / "vendor"
if not (vendor_dir / "swagger-ui.css").exists() or not (vendor_dir / "swagger-ui-bundle.js").exists():
return True
# 比较哈希值
current_hash = get_openapi_hash(openapi_spec)
cached_hash = get_cached_hash()
return current_hash != cached_hash
def generate_docs(app) -> bool:
"""
生成API文档
Args:
app: FastAPI应用实例
Returns:
bool: 是否重新生成了文档
"""
try:
# 获取OpenAPI规范
openapi_spec = app.openapi()
# 检查是否需要重新生成
if not should_regenerate(openapi_spec):
logger.info("API文档未变化使用缓存")
return False
logger.info("检测到API变化正在生成文档...")
# 先写入加载页面
DOC_FILE.write_text(generate_loading_html(), encoding='utf-8')
# 生成文档HTML
doc_html = generate_doc_html(openapi_spec)
# 写入文档文件
DOC_FILE.write_text(doc_html, encoding='utf-8')
# 保存哈希值
current_hash = get_openapi_hash(openapi_spec)
save_cache_hash(current_hash)
logger.info("API文档生成完成")
return True
except Exception as e:
logger.error(f"生成API文档失败: {e}")
return False
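Editor's note: a sketch of how `generate_docs()` could be wired into application startup so that `static/doc.html` is rebuilt only when the OpenAPI spec changes. The import path and the use of a startup event are assumptions, not the project's confirmed wiring.

```python
# Sketch only: module path src.core.doc_generator is assumed.
from fastapi import FastAPI
from src.core.doc_generator import generate_docs

app = FastAPI(title="X-Request API Framework", version="1.0.0")

@app.on_event("startup")
async def build_offline_docs() -> None:
    # generate_docs() returns True only when the spec hash changed
    # and static/doc.html was rewritten; otherwise the cached page is reused.
    if generate_docs(app):
        print("static/doc.html regenerated")
```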

View File

@@ -0,0 +1,130 @@
from fastapi import HTTPException
from typing import Any, Dict, Optional
class BaseAPIException(Exception):
"""API异常基类"""
def __init__(
self,
message: str,
error_code: Optional[str] = None,
status_code: int = 500,
details: Optional[Dict[str, Any]] = None
):
self.message = message
self.error_code = error_code
self.status_code = status_code
self.details = details or {}
super().__init__(self.message)
class ValidationException(BaseAPIException):
"""数据验证异常"""
def __init__(self, message: str, details: Optional[Dict[str, Any]] = None):
super().__init__(
message=message,
error_code="VALIDATION_ERROR",
status_code=400,
details=details
)
class AuthenticationException(BaseAPIException):
"""认证异常"""
def __init__(self, message: str = "认证失败"):
super().__init__(
message=message,
error_code="AUTHENTICATION_ERROR",
status_code=401
)
class AuthorizationException(BaseAPIException):
"""授权异常"""
def __init__(self, message: str = "权限不足"):
super().__init__(
message=message,
error_code="AUTHORIZATION_ERROR",
status_code=403
)
class NotFoundException(BaseAPIException):
"""资源未找到异常"""
def __init__(self, message: str = "资源未找到", resource: Optional[str] = None):
details = {"resource": resource} if resource else None
super().__init__(
message=message,
error_code="NOT_FOUND",
status_code=404,
details=details
)
class ConflictException(BaseAPIException):
"""资源冲突异常"""
def __init__(self, message: str = "资源冲突", details: Optional[Dict[str, Any]] = None):
super().__init__(
message=message,
error_code="CONFLICT",
status_code=409,
details=details
)
class RateLimitException(BaseAPIException):
"""限流异常"""
def __init__(self, message: str = "请求频率过高", retry_after: Optional[int] = None):
details = {"retry_after": retry_after} if retry_after else None
super().__init__(
message=message,
error_code="RATE_LIMIT_EXCEEDED",
status_code=429,
details=details
)
class ExternalServiceException(BaseAPIException):
"""外部服务异常"""
def __init__(
self,
message: str = "外部服务调用失败",
service_name: Optional[str] = None,
details: Optional[Dict[str, Any]] = None
):
if service_name:
details = details or {}
details["service_name"] = service_name
super().__init__(
message=message,
error_code="EXTERNAL_SERVICE_ERROR",
status_code=502,
details=details
)
class BusinessException(BaseAPIException):
"""业务逻辑异常"""
def __init__(
self,
message: str,
business_code: Optional[str] = None,
details: Optional[Dict[str, Any]] = None
):
error_code = f"BUSINESS_ERROR_{business_code}" if business_code else "BUSINESS_ERROR"
super().__init__(
message=message,
error_code=error_code,
status_code=422,
details=details
)
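Editor's note: a sketch showing how these exception classes might be raised in a route and converted to a JSON response by a global handler. The import path and response payload shape are assumptions for illustration.

```python
# Sketch only: module path src.core.exceptions is assumed.
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from src.core.exceptions import BaseAPIException, BusinessException

app = FastAPI()

@app.exception_handler(BaseAPIException)
async def api_exception_handler(request: Request, exc: BaseAPIException) -> JSONResponse:
    # Map the exception's metadata onto a uniform error payload.
    return JSONResponse(
        status_code=exc.status_code,
        content={"error_code": exc.error_code, "message": exc.message, "details": exc.details},
    )

@app.get("/orders/{order_id}")
async def get_order(order_id: int):
    if order_id < 0:
        raise BusinessException("invalid order id", business_code="ORDER_ID_INVALID")
    return {"order_id": order_id}
```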

View File

@@ -0,0 +1,104 @@
"""
高级日志系统使用示例
演示如何在API中使用新的日志系统
"""
from datetime import datetime
from ..utils.advanced_logger import get_route_logger, advanced_logger_manager
def example_hello_logging():
"""Hello API日志使用示例"""
route_logger = get_route_logger("hello")
# 记录一般信息
route_logger.log_info("Hello API 被调用", user_id="12345")
# 记录请求
route_logger.log_request(
method="GET",
path="/api/hello",
status_code=200,
process_time=0.05,
client_ip="127.0.0.1",
request_id="req-123",
user_agent="Mozilla/5.0...",
query_params={"name": "张三"}
)
# 记录警告
route_logger.log_warning("用户频繁调用API", user_id="12345", call_count=10)
# 记录错误
route_logger.log_error("处理请求失败", error="数据库连接超时", user_id="12345")
def example_user_logging():
"""User API日志使用示例"""
route_logger = get_route_logger("user")
# 记录用户创建
route_logger.log_info("用户注册成功", user_id="12345", email="user@example.com")
# 记录登录请求
route_logger.log_request(
method="POST",
path="/api/user/login",
status_code=200,
process_time=0.12,
client_ip="192.168.1.100",
request_id="req-456",
query_params={},
request_body={"username": "admin", "password": "***"} # 敏感信息已过滤
)
def example_get_log_stats():
"""获取日志统计信息示例"""
stats = advanced_logger_manager.get_log_stats()
print("日志统计信息:")
print(f"- 总目录数: {stats['total_directories']}")
print(f"- 总文件数: {stats['total_files']}")
print(f"- 总大小: {stats['total_size_mb']} MB")
print(f"- 活跃处理器数: {stats['active_handlers']}")
print(f"- 路由列表: {', '.join(stats['routes'])}")
def example_cleanup_logs():
"""日志清理示例"""
result = advanced_logger_manager.cleanup_old_logs(days=7)
print("日志清理结果:")
print(f"- 删除目录数: {result['deleted_dirs']}")
print(f"- 删除文件数: {result['deleted_files']}")
print(f"- 释放空间: {result['freed_mb']} MB")
if result['errors']:
print("错误信息:")
for error in result['errors']:
print(f" - {error}")
if __name__ == "__main__":
# 运行示例
print("=== 高级日志系统使用示例 ===\n")
print("1. 模拟Hello API调用...")
example_hello_logging()
print("\n2. 模拟User API调用...")
example_user_logging()
print("\n3. 获取日志统计信息...")
example_get_log_stats()
print("\n4. 示例完成!")
print("你可以通过以下API端点查看日志管理功能:")
print("- GET /logs/stats - 获取日志统计")
print("- GET /logs/directories - 获取日志目录列表")
print("- GET /logs/search - 搜索日志内容")
print("- GET /logs/cleanup - 清理过期日志")
print("- GET /logs/files/{date}/{route_name} - 查看指定日志文件")
print("- GET /logs/download/{date}/{route_name} - 下载指定日志文件")

228
request/src/utils/logger.py Normal file
View File

@@ -0,0 +1,228 @@
import structlog
import logging
import sys
import json
from datetime import datetime
from typing import Any, Dict, Optional
from pathlib import Path
import colorama
from colorama import Fore, Back, Style
# 尝试导入pythonjsonlogger如果没有安装则使用备用方案
try:
from pythonjsonlogger import jsonlogger
HAS_JSON_LOGGER = True
except ImportError:
HAS_JSON_LOGGER = False
jsonlogger = None
# 初始化colorama
colorama.init()
class ColoredConsoleRenderer:
"""带颜色的控制台日志渲染器"""
def __call__(self, logger, method_name: str, event_dict: Dict[str, Any]) -> str:
"""渲染日志事件"""
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
level = event_dict.get("level", "INFO").upper()
message = event_dict.get("event", "")
# 根据日志级别选择颜色
level_colors = {
"DEBUG": Fore.CYAN,
"INFO": Fore.GREEN,
"WARNING": Fore.YELLOW,
"ERROR": Fore.RED,
"CRITICAL": Fore.RED + Back.WHITE + Style.BRIGHT,
}
color = level_colors.get(level, "")
reset = Style.RESET_ALL
# 基础信息
log_line = f"{color}[{timestamp}] {level}{reset} {message}"
# 添加额外的上下文信息
if "request_id" in event_dict:
log_line += f" {Fore.BLUE}[req:{event_dict['request_id']}]{reset}"
if "function" in event_dict:
log_line += f" {Fore.MAGIC}{event_dict['function']}(){reset}"
# 添加其他字段
for key, value in event_dict.items():
if key not in ["level", "event", "timestamp", "request_id", "function"]:
log_line += f" {Fore.CYAN}{key}={value}{reset}"
return log_line
class JSONRenderer:
"""JSON格式的日志渲染器"""
def __call__(self, logger, method_name: str, event_dict: Dict[str, Any]) -> str:
"""渲染日志事件为JSON格式"""
log_data = {
"timestamp": datetime.now().isoformat(),
"level": event_dict.get("level", "INFO"),
"message": event_dict.get("event", ""),
}
# 添加其他字段
for key, value in event_dict.items():
if key not in ["level", "event"]:
log_data[key] = value
return json.dumps(log_data, ensure_ascii=False, default=str)
class LoggerManager:
"""日志管理器"""
def __init__(self):
self._processors = []
self._configured = False
def configure(self,
log_level: str = "INFO",
log_format: str = "console",
log_file: Optional[str] = None,
log_to_console: bool = True):
"""配置日志系统"""
# 配置structlog处理器
processors = [
structlog.stdlib.filter_by_level,
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.TimeStamper(fmt="iso"),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
]
# 添加自定义处理器
processors.extend(self._processors)
# 选择渲染器
if log_format == "json":
renderer = JSONRenderer()
else:
renderer = ColoredConsoleRenderer()
processors.append(renderer)
# 配置structlog
structlog.configure(
processors=processors,
wrapper_class=structlog.stdlib.BoundLogger,
logger_factory=structlog.stdlib.LoggerFactory(),
cache_logger_on_first_use=True,
)
# 配置标准库logging
# 防止重复配置
root_logger = logging.getLogger()
if not root_logger.handlers:
level = getattr(logging, log_level.upper())
# 如果指定了日志文件,配置文件日志
if log_file:
self._setup_file_handler(log_file, log_level)
# 如果允许控制台输出,配置控制台日志
if log_to_console:
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setLevel(level)
console_handler.setFormatter(logging.Formatter("%(message)s"))
root_logger.addHandler(console_handler)
else:
# 如果不输出到控制台,至少设置日志级别
root_logger.setLevel(level)
self._configured = True
def _setup_file_handler(self, log_file: str, log_level: str = "INFO"):
"""设置文件日志处理器"""
log_path = Path(log_file)
log_path.parent.mkdir(parents=True, exist_ok=True)
file_handler = logging.FileHandler(log_file, encoding='utf-8')
level = getattr(logging, log_level.upper())
file_handler.setLevel(level)
if HAS_JSON_LOGGER:
file_handler.setFormatter(jsonlogger.JsonFormatter(
'%(asctime)s %(name)s %(levelname)s %(message)s'
))
else:
# pythonjsonlogger 未安装时,回退到标准库的普通文本格式化器
file_handler.setFormatter(logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
))
# 获取根logger并添加文件处理器
root_logger = logging.getLogger()
root_logger.addHandler(file_handler)
root_logger.setLevel(level)
def add_processor(self, processor):
"""添加自定义处理器"""
self._processors.append(processor)
def get_logger(self, name: Optional[str] = None) -> structlog.stdlib.BoundLogger:
"""获取logger实例"""
if not self._configured:
self.configure()
return structlog.get_logger(name)
# 全局日志管理器实例
logger_manager = LoggerManager()
# 获取logger的便捷函数
def get_logger(name: Optional[str] = None) -> structlog.stdlib.BoundLogger:
"""获取logger实例"""
return logger_manager.get_logger(name)
# 简单的日志打印函数
def log(message: str, level: str = "info", **kwargs):
"""简单的日志打印函数
Args:
message: 日志消息
level: 日志级别 (debug, info, warning, error, critical)
**kwargs: 额外的上下文信息
"""
logger = get_logger()
log_method = getattr(logger, level.lower(), logger.info)
log_method(message, **kwargs)
# 带上下文的日志函数
def log_debug(message: str, **kwargs):
"""打印debug级别日志"""
log(message, "debug", **kwargs)
def log_info(message: str, **kwargs):
"""打印info级别日志"""
log(message, "info", **kwargs)
def log_warning(message: str, **kwargs):
"""打印warning级别日志"""
log(message, "warning", **kwargs)
def log_error(message: str, **kwargs):
"""打印error级别日志"""
log(message, "error", **kwargs)
def log_critical(message: str, **kwargs):
"""打印critical级别日志"""
log(message, "critical", **kwargs)
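Editor's note: a minimal sketch of configuring this logger module and emitting structured logs, assuming it is importable as src.utils.logger.

```python
# Sketch only: the import path is an assumption.
from src.utils.logger import logger_manager, get_logger, log_info

# Colored console output, plus a file log (JSON if pythonjsonlogger is installed).
logger_manager.configure(log_level="DEBUG", log_format="console", log_file="logs/app.log")

logger = get_logger(__name__)
logger.info("service started", port=8000)          # structlog kwargs become context fields
log_info("request handled", request_id="req-123")  # module-level convenience function
```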

View File

@@ -0,0 +1,368 @@
"""
统一日志管理器 - 整合基础日志和高级日志系统
提供简单易用的统一接口,同时保持向后兼容
"""
import os
import sys
import json
import logging
import time
import threading
from datetime import datetime
from typing import Dict, Any, Optional, Union
from pathlib import Path
from contextlib import contextmanager
try:
import structlog
from pythonjsonlogger import jsonlogger
STRUCTLOG_AVAILABLE = True
except ImportError:
STRUCTLOG_AVAILABLE = False
from .logger import LoggerManager, ColoredConsoleRenderer, JSONRenderer
class UnifiedLoggerManager:
"""统一日志管理器 - 整合所有日志功能"""
_instance = None
_lock = threading.Lock()
def __new__(cls):
"""单例模式"""
if cls._instance is None:
with cls._lock:
if cls._instance is None:
cls._instance = super().__new__(cls)
return cls._instance
def __init__(self):
if not hasattr(self, '_initialized'):
self._initialized = True
self._config = {}
self._loggers = {}
self._route_loggers = {}
self._file_handlers = {}
self._setup_default_config()
def _setup_default_config(self):
"""设置默认配置"""
self._config = {
# 基础配置
'level': 'INFO',
'format': 'json', # 'json' 或 'console'
'console_output': True,
# 高级配置
'advanced_mode': True,
'route_based_logging': True,
'logs_dir': 'logs',
'max_log_days': 30,
'enable_cleanup': True,
# 文件配置
'file_rotation': False,
'max_file_size': '10MB',
'backup_count': 5,
# 性能配置
'async_write': True,
'buffer_size': 100,
'flush_interval': 5,
}
def configure(self, **kwargs):
"""配置日志系统"""
self._config.update(kwargs)
# 重新配置基础日志管理器
if hasattr(self, '_logger_manager'):
self._logger_manager.configure(
log_level=self._config['level'],
log_format=self._config['format'],
log_file=self._config.get('log_file'),
log_to_console=self._config['console_output']
)
def get_logger(self, name: str = None, route_name: str = None) -> 'UnifiedLogger':
"""获取统一的日志记录器
Args:
name: 日志记录器名称
route_name: 路由名称(用于路由专用日志)
Returns:
UnifiedLogger: 统一日志记录器实例
"""
key = f"{name}:{route_name}" if route_name else name
if key not in self._loggers:
logger = UnifiedLogger(
name=name or 'default',
route_name=route_name,
config=self._config
)
self._loggers[key] = logger
return self._loggers[key]
def get_route_logger(self, route_name: str) -> 'UnifiedLogger':
"""获取路由专用日志记录器"""
return self.get_logger(route_name=route_name)
def cleanup_logs(self, days: int = None) -> Dict[str, Any]:
"""清理过期日志文件"""
days = days or self._config['max_log_days']
if not self._config['enable_cleanup']:
return {'status': 'disabled', 'message': 'Log cleanup is disabled'}
logs_dir = Path(self._config['logs_dir'])
if not logs_dir.exists():
return {'status': 'success', 'deleted_count': 0, 'deleted_files': []}
deleted_files = []
cutoff_time = time.time() - (days * 24 * 3600)
for log_file in logs_dir.rglob('*.log'):
try:
if log_file.stat().st_mtime < cutoff_time:
log_file.unlink()
deleted_files.append(str(log_file))
except Exception as e:
logging.warning(f"Failed to delete log file {log_file}: {e}")
return {
'status': 'success',
'deleted_count': len(deleted_files),
'deleted_files': deleted_files
}
def get_log_stats(self) -> Dict[str, Any]:
"""获取日志统计信息"""
logs_dir = Path(self._config['logs_dir'])
stats = {
'total_files': 0,
'total_size': 0,
'by_date': {},
'by_route': {}
}
if not logs_dir.exists():
return stats
for log_file in logs_dir.rglob('*.log'):
try:
file_stat = log_file.stat()
file_size = file_stat.st_size
stats['total_files'] += 1
stats['total_size'] += file_size
# 按日期统计
date_dir = log_file.parent.name
if date_dir not in stats['by_date']:
stats['by_date'][date_dir] = {'files': 0, 'size': 0}
stats['by_date'][date_dir]['files'] += 1
stats['by_date'][date_dir]['size'] += file_size
# 按路由统计
route_name = log_file.stem.split('_')[0] # 去掉 _success 或 _error
if route_name not in stats['by_route']:
stats['by_route'][route_name] = {'files': 0, 'size': 0}
stats['by_route'][route_name]['files'] += 1
stats['by_route'][route_name]['size'] += file_size
except Exception as e:
logging.warning(f"Failed to stat log file {log_file}: {e}")
return stats
def list_log_files(self, date: str = None) -> Dict[str, Any]:
"""列出日志文件"""
logs_dir = Path(self._config['logs_dir'])
files = []
search_dir = logs_dir / date if date else logs_dir
if not search_dir.exists():
return {'date': date, 'files': []}
for log_file in search_dir.glob('*.log'):
try:
file_stat = log_file.stat()
files.append({
'name': log_file.name,
'path': str(log_file.relative_to(logs_dir)),
'size': file_stat.st_size,
'modified': datetime.fromtimestamp(file_stat.st_mtime).isoformat()
})
except Exception as e:
logging.warning(f"Failed to read log file info {log_file}: {e}")
files.sort(key=lambda x: x['name'])
return {'date': date or 'all', 'files': files}
class UnifiedLogger:
"""统一日志记录器 - 提供一致的日志接口"""
def __init__(self, name: str, route_name: str = None, config: Dict[str, Any] = None):
self.name = name
self.route_name = route_name
self.config = config or {}
# 根据配置选择底层日志系统
if self.config.get('advanced_mode', True):
self._init_advanced_logger()
else:
self._init_basic_logger()
def _init_advanced_logger(self):
"""初始化高级日志系统"""
if self.config.get('route_based_logging', True) and self.route_name:
from .advanced_logger import advanced_logger_manager
self._logger = advanced_logger_manager.get_route_logger(self.route_name)
else:
# 创建高级日志记录器
if STRUCTLOG_AVAILABLE:
structlog.configure(
processors=[
structlog.stdlib.filter_by_level,
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.TimeStamper(fmt="iso"),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
ColoredConsoleRenderer() if self.config.get('console_output') else JSONRenderer()
],
context_class=dict,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
self._logger = structlog.get_logger(self.name)
else:
self._init_basic_logger()
def _init_basic_logger(self):
"""初始化基础日志系统"""
self._logger_manager = LoggerManager()
self._logger = self._logger_manager.get_logger(self.name)
def debug(self, message: str, **kwargs):
"""记录调试日志"""
self._log('debug', message, **kwargs)
def info(self, message: str, **kwargs):
"""记录信息日志"""
self._log('info', message, **kwargs)
def warning(self, message: str, **kwargs):
"""记录警告日志"""
self._log('warning', message, **kwargs)
def error(self, message: str, **kwargs):
"""记录错误日志"""
self._log('error', message, **kwargs)
def critical(self, message: str, **kwargs):
"""记录严重错误日志"""
self._log('critical', message, **kwargs)
def _log(self, level: str, message: str, **kwargs):
"""内部日志记录方法"""
if hasattr(self._logger, level):
if kwargs:
getattr(self._logger, level)(message, **kwargs)
else:
getattr(self._logger, level)(message)
else:
# 降级处理
print(f"[{level.upper()}] {message}", file=sys.stderr if level in ['error', 'critical'] else sys.stdout)
def log_request(self, method: str, path: str, request_id: str = None, **kwargs):
"""记录请求日志"""
message = f"{method} {path}"
extra = {
'event_type': 'request',
'method': method,
'path': path,
'request_id': request_id,
**kwargs
}
self.info(message, **extra)
def log_response(self, status_code: int, process_time: float = None, request_id: str = None, **kwargs):
"""记录响应日志"""
level = 'info' if status_code < 400 else 'warning' if status_code < 500 else 'error'
message = f"Response {status_code}"
extra = {
'event_type': 'response',
'status_code': status_code,
'process_time_ms': round(process_time * 1000, 2) if process_time else None,
'request_id': request_id,
**kwargs
}
self._log(level, message, **extra)
def log_exception(self, exception: Exception, **kwargs):
"""记录异常日志"""
message = f"Exception: {str(exception)}"
extra = {
'event_type': 'exception',
'exception_type': type(exception).__name__,
'exception_message': str(exception),
**kwargs
}
self.error(message, **extra)
@contextmanager
def log_context(self, **context):
"""日志上下文管理器"""
if hasattr(self._logger, 'bind'):
bound_logger = self._logger.bind(**context)
original_logger = self._logger
self._logger = bound_logger
try:
yield self
finally:
self._logger = original_logger
else:
# 基础日志系统的降级处理
yield self
# 全局统一日志管理器实例
unified_logger_manager = UnifiedLoggerManager()
# 便捷函数
def get_logger(name: str = None, route_name: str = None) -> UnifiedLogger:
"""获取统一日志记录器的便捷函数"""
return unified_logger_manager.get_logger(name=name, route_name=route_name)
def get_route_logger(route_name: str) -> UnifiedLogger:
"""获取路由日志记录器的便捷函数"""
return unified_logger_manager.get_route_logger(route_name)
def configure_logging(**kwargs):
"""配置日志系统的便捷函数"""
unified_logger_manager.configure(**kwargs)
def cleanup_logs(days: int = None) -> Dict[str, Any]:
"""清理日志的便捷函数"""
return unified_logger_manager.cleanup_logs(days=days)
def get_log_stats() -> Dict[str, Any]:
"""获取日志统计的便捷函数"""
return unified_logger_manager.get_log_stats()
# 向后兼容的别名
logger_manager = unified_logger_manager
log_manager = unified_logger_manager
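Editor's note: a short sketch of the unified logger in use, with a bound request context. The module path is assumed (e.g. src.utils.unified_logger).

```python
# Sketch only: the import path is an assumption.
from src.utils.unified_logger import configure_logging, get_logger

configure_logging(level="INFO", format="console")

logger = get_logger(name="app")
with logger.log_context(request_id="req-789", client_ip="127.0.0.1"):
    logger.log_request(method="GET", path="/api/hello", request_id="req-789")
    logger.log_response(status_code=200, process_time=0.012, request_id="req-789")
```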

99
request/start.sh Normal file
View File

@@ -0,0 +1,99 @@
#!/usr/bin/env bash
set -euo pipefail
echo "🚀 启动 X-Request 高性能 FastAPI 框架"
# 永远从脚本所在目录运行(避免在别的目录执行导致找不到 venv / requirements / .env
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
mkdir -p logs
PID_FILE="logs/xrequest.pid"
LOG_FILE="logs/xrequest.server.log"
# 检查虚拟环境是否存在
if [ ! -d "xrequest" ]; then
echo "❌ 虚拟环境不存在,请先运行:"
echo " ./setup.sh"
exit 1
fi
# 选择虚拟环境里的 Python不要依赖 source activate也不要回退系统 Python
VENV_PY=""
if [ -f "xrequest/Scripts/python.exe" ]; then
VENV_PY="xrequest/Scripts/python.exe"
elif [ -f "xrequest/bin/python" ]; then
VENV_PY="xrequest/bin/python"
fi
if [ -z "$VENV_PY" ]; then
echo "❌ 未找到虚拟环境 Python可尝试重新创建虚拟环境"
echo " rm -rf xrequest && ./setup.sh"
exit 1
fi
echo "✅ 使用虚拟环境 Python: $VENV_PY"
# 检查 .env 文件是否存在
if [ ! -f ".env" ]; then
echo "❌ 环境配置文件 .env 不存在,请先运行:"
echo " ./setup.sh"
exit 1
fi
# 读取 .env 文件中的配置
PORT="8000" # 默认端口
HOST="0.0.0.0" # 默认主机
# 解析 .env 文件(简单解析 PORT 和 HOST
if [ -f ".env" ]; then
# 使用 grep 和 sed 来提取 PORT 和 HOST忽略注释
PORT=$(grep -v '^#' .env | grep '^PORT=' | head -1 | cut -d= -f2 | tr -d '"' || echo "8000")
HOST=$(grep -v '^#' .env | grep '^HOST=' | head -1 | cut -d= -f2 | tr -d '"' || echo "0.0.0.0")
fi
echo "📋 使用配置: HOST=$HOST, PORT=$PORT"
# 快速校验关键依赖是否可导入(避免跑到一半才 ModuleNotFoundError
if ! "$VENV_PY" -c "import uvicorn" >/dev/null 2>&1; then
echo "❌ 虚拟环境缺少 uvicorn或未正确安装依赖。请运行"
echo " ./setup.sh"
echo " 或手动安装:$VENV_PY -m pip install -r requirements.txt"
exit 1
fi
if [ -f "$PID_FILE" ]; then
OLD_PID="$(cat "$PID_FILE" 2>/dev/null || true)"
if [ -n "$OLD_PID" ] && kill -0 "$OLD_PID" >/dev/null 2>&1; then
echo "✅ X-Request 已在后台运行 (pid=$OLD_PID)"
echo "📄 日志: $LOG_FILE"
exit 0
fi
rm -f "$PID_FILE" || true
fi
echo "🎯 启动应用服务..."
echo "📚 API文档地址: http://localhost:$PORT/docs"
echo "🏥 健康检查: http://localhost:$PORT/health"
echo "📊 应用信息: http://localhost:$PORT/info"
echo "⏹️ 停止服务: ./stop.sh"
echo "📄 日志: $LOG_FILE"
echo ""
nohup "$VENV_PY" "main.py" >>"$LOG_FILE" 2>&1 &
PID_NUM=$!
echo $PID_NUM > "$PID_FILE"
sleep 0.5  # 给进程一点启动时间,再确认其仍在运行
if kill -0 "$PID_NUM" >/dev/null 2>&1; then
echo "✅ 已在后台启动 (pid=$PID_NUM)"
else
echo "❌ 启动失败,请检查日志: $LOG_FILE"
rm -f "$PID_FILE"
exit 1
fi
echo " 查看日志: tail -f $LOG_FILE"
echo " 停止服务: ./stop.sh"

View File

@@ -0,0 +1 @@
0dc715b603074fca864c0c5074317e6a

799
request/static/doc.html Normal file
View File

@@ -0,0 +1,799 @@
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>X-Request API Framework - 接口文档</title>
<!-- 本地Swagger UI资源 -->
<link rel="stylesheet" href="/vendor/swagger-ui.css">
<script src="/vendor/swagger-ui-bundle.js"></script>
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
background: #f8fafc;
}
.nav {
background: white;
box-shadow: 0 1px 3px rgba(0,0,0,0.1);
height: 64px;
display: flex;
align-items: center;
padding: 0 24px;
position: sticky;
top: 0;
z-index: 100;
}
.nav-brand {
display: flex;
align-items: center;
font-size: 20px;
font-weight: 600;
color: #1f2937;
}
.nav-brand i {
color: #3b82f6;
margin-right: 8px;
}
.nav-links {
display: flex;
margin-left: 24px;
gap: 8px;
}
.nav-link {
padding: 8px 12px;
border-radius: 6px;
text-decoration: none;
font-size: 14px;
color: #6b7280;
transition: all 0.2s;
}
.nav-link:hover {
background: rgba(59, 130, 246, 0.1);
color: #3b82f6;
}
.nav-link.active {
background: rgba(59, 130, 246, 0.1);
color: #3b82f6;
border-bottom: 2px solid #3b82f6;
}
.nav-right {
margin-left: auto;
display: flex;
align-items: center;
gap: 16px;
}
.api-version {
font-size: 14px;
color: #6b7280;
}
.api-version span {
font-weight: 500;
color: #1f2937;
}
.main {
padding: 16px 24px;
}
.doc-container {
background: white;
border-radius: 8px;
box-shadow: 0 1px 3px rgba(0,0,0,0.1);
min-height: calc(100vh - 120px);
}
/* Swagger UI 样式覆盖 */
.swagger-ui .topbar {
display: none;
}
.swagger-ui .info {
margin: 20px 0;
}
.swagger-ui .scheme-container {
background: #f8fafc;
padding: 15px;
box-shadow: none;
}
/* Font Awesome 图标 (内联) */
.fa {
display: inline-block;
font-style: normal;
}
.fa-file-text-o:before { content: "📄"; }
.fa-file-text:before { content: "📝"; }
.fa-book:before { content: "📖"; }
.fa-home:before { content: "🏠"; }
</style>
</head>
<body>
<nav class="nav">
<div class="nav-brand">
<i class="fa fa-file-text-o"></i>
X-Request 管理系统
</div>
<div class="nav-links">
<a href="/log.html" class="nav-link">
<i class="fa fa-file-text"></i> 日志管理
</a>
<a href="/doc.html" class="nav-link active">
<i class="fa fa-book"></i> 接口文档
</a>
<a href="/" class="nav-link">
<i class="fa fa-home"></i> 首页
</a>
</div>
<div class="nav-right">
<div class="api-version">
API版本: <span>1.0.0</span>
</div>
</div>
</nav>
<main class="main">
<div class="doc-container">
<div id="swagger-ui"></div>
</div>
</main>
<script>
// 内嵌的OpenAPI规范
const spec = {
"openapi": "3.1.0",
"info": {
"title": "X-Request API Framework",
"description": "高性能、高并发的请求框架",
"version": "1.0.0"
},
"paths": {
"/": {
"get": {
"summary": "Root",
"description": "根路径 - 重定向到前端监控界面",
"operationId": "root__get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
}
}
}
},
"/dashboard": {
"get": {
"summary": "Dashboard",
"description": "前端监控界面",
"operationId": "dashboard_dashboard_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
}
}
}
},
"/health": {
"get": {
"summary": "Health Check",
"description": "健康检查",
"operationId": "health_check_health_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
}
}
}
},
"/info": {
"get": {
"summary": "App Info",
"description": "应用信息",
"operationId": "app_info_info_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
}
}
}
},
"/routes": {
"get": {
"summary": "Get Routes Info",
"description": "获取所有路由信息",
"operationId": "get_routes_info_routes_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
}
}
}
},
"/monitoring/logs/batch/delete": {
"post": {
"tags": [
"Monitoring"
],
"summary": "批量删除日志文件",
"description": "批量删除日志文件\n\nArgs:\n request: 批量删除请求,包含日志文件路径列表",
"operationId": "batch_delete_logs_monitoring_logs_batch_delete_post",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/BatchDeleteRequest"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/monitoring/logs/download": {
"post": {
"tags": [
"Monitoring"
],
"summary": "批量下载日志文件",
"description": "批量下载日志文件\n\nArgs:\n request: 批量下载请求,包含日志文件路径列表",
"operationId": "batch_download_logs_monitoring_logs_download_post",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/BatchDownloadRequest"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/monitoring/logs/{log_name}": {
"delete": {
"tags": [
"Monitoring"
],
"summary": "删除单个日志文件",
"description": "删除单个日志文件\n\nArgs:\n log_name: 日志文件名支持带日期路径如YYYY-MM-DD/filename.log",
"operationId": "delete_log_monitoring_logs__log_name__delete",
"parameters": [
{
"name": "log_name",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Log Name"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
},
"get": {
"tags": [
"Monitoring"
],
"summary": "获取日志内容",
"description": "获取日志文件内容\n\nArgs:\n log_name: 日志文件名\n lines: 返回的行数默认100行",
"operationId": "get_log_content_monitoring_logs__log_name__get",
"parameters": [
{
"name": "log_name",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Log Name"
}
},
{
"name": "lines",
"in": "query",
"required": false,
"schema": {
"type": "integer",
"default": 100,
"title": "Lines"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/monitoring/logs/{date}/{log_name}": {
"delete": {
"tags": [
"Monitoring"
],
"summary": "删除指定日期的单个日志文件",
"description": "删除指定日期的单个日志文件\n\nArgs:\n date: 日期格式为YYYY-MM-DD\n log_name: 日志文件名",
"operationId": "delete_log_by_date_monitoring_logs__date___log_name__delete",
"parameters": [
{
"name": "date",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Date"
}
},
{
"name": "log_name",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Log Name"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
},
"get": {
"tags": [
"Monitoring"
],
"summary": "获取指定日期的日志内容",
"description": "获取指定日期的日志内容\n\nArgs:\n date: 日期格式为YYYY-MM-DD\n log_name: 日志文件名\n entries: 返回的日志条目数量默认10个\n mode: 显示模式latest表示最新的N个条目显示在顶部oldest表示最早的N个条目显示在顶部默认latest",
"operationId": "get_log_content_by_date_monitoring_logs__date___log_name__get",
"parameters": [
{
"name": "date",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Date"
}
},
{
"name": "log_name",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Log Name"
}
},
{
"name": "entries",
"in": "query",
"required": false,
"schema": {
"type": "integer",
"default": 10,
"title": "Entries"
}
},
{
"name": "mode",
"in": "query",
"required": false,
"schema": {
"type": "string",
"default": "latest",
"title": "Mode"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/monitoring/logs/{log_name}/download": {
"get": {
"tags": [
"Monitoring"
],
"summary": "下载日志文件",
"description": "下载日志文件\n\nArgs:\n log_name: 日志文件名支持带日期路径如YYYY-MM-DD/filename.log",
"operationId": "download_log_monitoring_logs__log_name__download_get",
"parameters": [
{
"name": "log_name",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Log Name"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/monitoring/logs/{date}/{log_name}/download": {
"get": {
"tags": [
"Monitoring"
],
"summary": "下载指定日期的日志文件",
"description": "下载指定日期的日志文件\n\nArgs:\n date: 日期格式为YYYY-MM-DD\n log_name: 日志文件名",
"operationId": "download_log_by_date_monitoring_logs__date___log_name__download_get",
"parameters": [
{
"name": "date",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Date"
}
},
{
"name": "log_name",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Log Name"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/monitoring/logs": {
"get": {
"tags": [
"Monitoring"
],
"summary": "获取日志列表",
"description": "获取日志列表,支持按时间分类\n\nArgs:\n date: 日期格式为YYYY-MM-DD如不提供则返回所有日期文件夹",
"operationId": "get_logs_list_monitoring_logs_get",
"parameters": [
{
"name": "date",
"in": "query",
"required": false,
"schema": {
"type": "string",
"title": "Date"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/monitoring/status": {
"get": {
"tags": [
"Monitoring"
],
"summary": "获取服务器状态",
"description": "获取服务器基本状态信息",
"operationId": "get_server_status_monitoring_status_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
}
}
}
}
},
"components": {
"schemas": {
"BatchDeleteRequest": {
"properties": {
"files": {
"items": {
"type": "string"
},
"type": "array",
"title": "Files"
}
},
"type": "object",
"required": [
"files"
],
"title": "BatchDeleteRequest",
"description": "批量删除请求模型"
},
"BatchDownloadRequest": {
"properties": {
"files": {
"items": {
"type": "string"
},
"type": "array",
"title": "Files"
}
},
"type": "object",
"required": [
"files"
],
"title": "BatchDownloadRequest",
"description": "批量下载请求模型"
},
"HTTPValidationError": {
"properties": {
"detail": {
"items": {
"$ref": "#/components/schemas/ValidationError"
},
"type": "array",
"title": "Detail"
}
},
"type": "object",
"title": "HTTPValidationError"
},
"ValidationError": {
"properties": {
"loc": {
"items": {
"anyOf": [
{
"type": "string"
},
{
"type": "integer"
}
]
},
"type": "array",
"title": "Location"
},
"msg": {
"type": "string",
"title": "Message"
},
"type": {
"type": "string",
"title": "Error Type"
}
},
"type": "object",
"required": [
"loc",
"msg",
"type"
],
"title": "ValidationError"
}
}
}
};
window.onload = function() {
SwaggerUIBundle({
spec: spec,
dom_id: '#swagger-ui',
deepLinking: true,
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIBundle.SwaggerUIStandalonePreset
],
layout: "BaseLayout",
defaultModelsExpandDepth: 1,
defaultModelExpandDepth: 1,
docExpansion: "list",
filter: true,
showExtensions: true,
showCommonExtensions: true
});
};
</script>
</body>
</html>

4
request/static/font-awesome.min.css vendored Normal file

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Binary file not shown.

175
request/static/index.html Normal file
View File

@@ -0,0 +1,175 @@
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>X-Request 管理系统</title>
<!-- 引入Tailwind CSS (离线版本) -->
<script src="vendor/tailwind.min.js"></script>
<!-- 引入内联 SVG 图标系统 (完全离线) -->
<script src="vendor/icons.js"></script>
<style>
.inline-icon, .inline-emoji {
display: inline-block;
vertical-align: middle;
}
.inline-icon svg {
width: 1em;
height: 1em;
fill: currentColor;
}
.inline-emoji {
font-size: 1em;
line-height: 1;
}
@keyframes spin {
from { transform: rotate(0deg); }
to { transform: rotate(360deg); }
}
.inline-icon[data-spin="true"], .inline-emoji[data-spin="true"] {
animation: spin 1s linear infinite;
}
</style>
<!-- 配置Tailwind -->
<script>
tailwind.config = {
theme: {
extend: {
colors: {
primary: '#3b82f6',
secondary: '#8b5cf6',
success: '#10b981',
warning: '#f59e0b',
danger: '#ef4444',
},
},
}
}
</script>
<style type="text/tailwindcss">
@layer utilities {
.content-auto {
content-visibility: auto;
}
}
/* 全屏样式 */
html, body {
height: 100vh;
margin: 0;
padding: 0;
}
body {
display: flex;
flex-direction: column;
}
main {
flex: 1;
padding: 1rem;
}
</style>
</head>
<body class="bg-gray-50 min-h-screen">
<!-- 顶部导航栏 -->
<nav class="bg-white shadow-md h-16" style="display: none;">
<div class="w-full mx-auto px-4 sm:px-6 lg:px-8">
<div class="flex justify-between h-16 items-center">
<div class="flex items-center">
<div class="flex-shrink-0 flex items-center">
<span class="text-3xl font-bold text-primary">X</span>
<span class="ml-2 text-xl font-semibold text-gray-800">X-Request 管理系统</span>
</div>
<div class="ml-6 flex items-center space-x-4">
<a href="/" class="px-3 py-2 rounded-md text-sm font-medium text-primary bg-primary/10 border-b-2 border-primary">
<i class="fa fa-home mr-1"></i> 首页
</a>
<a href="/log.html" class="px-3 py-2 rounded-md text-sm font-medium text-gray-500 hover:text-primary hover:bg-primary/10 hover:border-b-2 hover:border-primary transition-colors">
<i class="fa fa-file-text mr-1"></i> 日志管理
</a>
<a href="/doc.html" class="px-3 py-2 rounded-md text-sm font-medium text-gray-500 hover:text-primary hover:bg-primary/10 hover:border-b-2 hover:border-primary transition-colors">
<i class="fa fa-book mr-1"></i> 接口文档
</a>
<a href="/status.html" class="px-3 py-2 rounded-md text-sm font-medium text-gray-500 hover:text-primary hover:bg-primary/10 hover:border-b-2 hover:border-primary transition-colors">
<i class="fa fa-heartbeat mr-1"></i> 系统状态
</a>
</div>
</div>
</div>
</div>
</nav>
<!-- 主要内容 -->
<main class="w-full px-4 sm:px-6 lg:px-8 py-4 flex items-center justify-center">
<div class="max-w-4xl w-full">
<div class="bg-white rounded-lg shadow-md p-8 text-center">
<div class="mb-8">
<span class="text-8xl font-bold text-primary mb-4 inline-block">X</span>
<h1 class="text-3xl font-bold text-gray-800 mb-2">X-Request 管理系统</h1>
</div>
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
<!-- 日志管理卡片 -->
<a href="log.html" class="block p-6 h-72 bg-primary/5 border border-primary/20 rounded-lg hover:bg-primary/10 transition-colors transform hover:scale-105">
<div class="flex flex-col items-center justify-center h-full">
<div class="flex items-center justify-center mb-4">
<div class="w-16 h-16 bg-primary/20 rounded-full flex items-center justify-center">
<i class="fa fa-file-text text-2xl text-primary"></i>
</div>
</div>
<h2 class="text-xl font-semibold text-gray-800 mb-2">日志管理</h2>
<p class="text-gray-600 mb-4 text-center">查看、下载和管理系统日志文件</p>
<div class="inline-flex items-center text-primary font-medium mt-auto">
<span>进入日志管理</span>
<i class="fa fa-arrow-right ml-2"></i>
</div>
</div>
</a>
<!-- 接口文档卡片 -->
<a href="doc.html" class="block p-6 h-72 bg-primary/5 border border-primary/20 rounded-lg hover:bg-primary/10 transition-colors transform hover:scale-105">
<div class="flex flex-col items-center justify-center h-full">
<div class="flex items-center justify-center mb-4">
<div class="w-16 h-16 bg-primary/20 rounded-full flex items-center justify-center">
<i class="fa fa-book text-2xl text-primary"></i>
</div>
</div>
<h2 class="text-xl font-semibold text-gray-800 mb-2">接口文档</h2>
<p class="text-gray-600 mb-4 text-center">查看系统API接口文档和规范</p>
<div class="inline-flex items-center text-primary font-medium mt-auto">
<span>进入接口文档</span>
<i class="fa fa-arrow-right ml-2"></i>
</div>
</div>
</a>
<!-- 系统状态卡片 -->
<a href="status.html" class="block p-6 h-72 bg-success/5 border border-success/20 rounded-lg hover:bg-success/10 transition-colors transform hover:scale-105">
<div class="flex flex-col items-center justify-center h-full">
<div class="flex items-center justify-center mb-4">
<div class="w-16 h-16 bg-success/20 rounded-full flex items-center justify-center">
<i class="fa fa-heartbeat text-2xl text-success"></i>
</div>
</div>
<h2 class="text-xl font-semibold text-gray-800 mb-2">系统状态</h2>
<p class="text-gray-600 mb-4 text-center">查看健康检查、应用信息和系统状态</p>
<div class="inline-flex items-center text-success font-medium mt-auto">
<span>进入系统状态</span>
<i class="fa fa-arrow-right ml-2"></i>
</div>
</div>
</a>
</div>
</div>
</div>
</main>
<!-- 页脚 -->
<footer class="bg-white border-t border-gray-200 py-4">
<div class="w-full mx-auto px-4 sm:px-6 lg:px-8 text-center text-sm text-gray-500">
<p>X-Request © 2025</p>
</div>
</footer>
</body>
</html>

697
request/static/log.html Normal file
View File

@@ -0,0 +1,697 @@
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>X-Request 日志管理</title>
<!-- 引入Tailwind CSS (离线版本) -->
<script src="vendor/tailwind.min.js"></script>
<!-- 引入内联 SVG 图标系统 (完全离线) -->
<script src="vendor/icons.js"></script>
<style>
.inline-icon, .inline-emoji {
display: inline-block;
vertical-align: middle;
}
.inline-icon svg {
width: 1em;
height: 1em;
fill: currentColor;
}
.inline-emoji {
font-size: 1em;
line-height: 1;
}
@keyframes spin {
from { transform: rotate(0deg); }
to { transform: rotate(360deg); }
}
.inline-icon[data-spin="true"], .inline-emoji[data-spin="true"] {
animation: spin 1s linear infinite;
}
</style>
<!-- 配置Tailwind -->
<script>
tailwind.config = {
theme: {
extend: {
colors: {
primary: '#3b82f6',
secondary: '#8b5cf6',
success: '#10b981',
warning: '#f59e0b',
danger: '#ef4444',
},
},
}
}
</script>
<style type="text/tailwindcss">
@layer utilities {
.content-auto {
content-visibility: auto;
}
.scrollbar-hide {
-ms-overflow-style: none;
scrollbar-width: none;
}
.scrollbar-hide::-webkit-scrollbar {
display: none;
}
}
/* 全屏样式 */
html, body {
height: 100vh;
margin: 0;
padding: 0;
}
body {
display: flex;
flex-direction: column;
}
main {
flex: 1;
padding: 1rem;
}
.max-w-7xl {
max-width: 100%;
}
/* 调整网格布局高度 */
.h-full {
height: 100%;
}
/* 调整滚动区域最大高度 */
.max-h-screen-content {
max-height: calc(100vh - 200px);
}
</style>
</head>
<body class="bg-gray-50 min-h-screen">
<!-- 顶部导航栏 -->
<nav class="bg-white shadow-md h-16">
<div class="w-full mx-auto px-4 sm:px-6 lg:px-8">
<div class="flex justify-between h-16 items-center">
<div class="flex items-center">
<div class="flex-shrink-0 flex items-center">
<span class="text-3xl font-bold text-primary">X</span>
<span class="ml-2 text-xl font-semibold text-gray-800">X-Request 管理系统</span>
</div>
<div class="ml-6 flex items-center space-x-4">
<a href="/" class="px-3 py-2 rounded-md text-sm font-medium text-gray-500 hover:text-primary hover:bg-primary/10 hover:border-b-2 hover:border-primary transition-colors">
<i class="fa fa-home mr-1"></i> 首页
</a>
<a href="/log.html" class="px-3 py-2 rounded-md text-sm font-medium text-primary bg-primary/10 border-b-2 border-primary">
<i class="fa fa-file-text mr-1"></i> 日志管理
</a>
<a href="/doc.html" class="px-3 py-2 rounded-md text-sm font-medium text-gray-500 hover:text-primary hover:bg-primary/10 hover:border-b-2 hover:border-primary transition-colors">
<i class="fa fa-book mr-1"></i> 接口文档
</a>
<a href="/status.html" class="px-3 py-2 rounded-md text-sm font-medium text-gray-500 hover:text-primary hover:bg-primary/10 hover:border-b-2 hover:border-primary transition-colors">
<i class="fa fa-heartbeat mr-1"></i> 系统状态
</a>
</div>
</div>
<div class="flex items-center">
<div class="mr-4">
<span class="text-sm text-gray-500">更新时间: </span>
<span id="last-updated" class="font-medium text-gray-800">--:--:--</span>
</div>
<button type="button" id="refresh-btn" class="bg-primary hover:bg-primary/90 text-white px-3 py-1.5 rounded-md text-sm font-medium transition-colors mr-2">
<i class="fa fa-refresh mr-1"></i> 刷新
</button>
<button type="button" id="batch-download-btn" onclick="batchDownload()" class="bg-success hover:bg-success/90 text-white px-3 py-1.5 rounded-md text-sm font-medium transition-colors disabled:opacity-50 disabled:cursor-not-allowed mr-2" disabled>
<i class="fa fa-download mr-1"></i> 批量下载
<span id="selected-count-download" class="ml-1 bg-white/20 px-2 py-0.5 rounded-full text-xs">0</span>
</button>
<button type="button" id="batch-delete-btn" onclick="batchDelete()" class="bg-danger hover:bg-danger/90 text-white px-3 py-1.5 rounded-md text-sm font-medium transition-colors disabled:opacity-50 disabled:cursor-not-allowed" disabled>
<i class="fa fa-trash mr-1"></i> 批量删除
<span id="selected-count-delete" class="ml-1 bg-white/20 px-2 py-0.5 rounded-full text-xs">0</span>
</button>
</div>
</div>
</div>
</nav>
<!-- 主要内容 -->
<main class="w-full px-4 sm:px-6 lg:px-8 py-4">
<!-- 日志管理视图 -->
<div class="bg-white rounded-lg shadow-md p-4 h-full">
<h2 class="text-xl font-bold text-gray-800 mb-4">日志管理</h2>
<!-- 日志分类和内容 -->
<div class="grid grid-cols-1 lg:grid-cols-12 gap-6 h-screen-content">
<!-- 左侧:日期列表 -->
<div class="lg:col-span-2">
<div class="bg-white rounded-lg shadow-sm p-4 h-full flex flex-col">
<h3 class="text-lg font-semibold text-gray-800 mb-4">按日期分类</h3>
<div class="space-y-2 flex-1 flex flex-col">
<div class="flex items-center">
<input type="checkbox" id="select-all-dates" class="rounded text-primary focus:ring-primary h-4 w-4 mr-2">
<label for="select-all-dates" class="text-sm font-medium text-gray-700">全选日期</label>
</div>
<div id="dates-list" class="space-y-1 flex-1 overflow-y-auto mt-2">
<!-- 日期列表将通过JavaScript动态生成 -->
<div class="text-center text-gray-500 py-8">
<i class="fa fa-spinner fa-spin text-xl mb-2"></i>
<p>加载中...</p>
</div>
</div>
</div>
</div>
</div>
<!-- 中间:日志文件列表 -->
<div class="lg:col-span-3">
<div class="bg-white rounded-lg shadow-sm p-4 h-full flex flex-col">
<div class="flex justify-between items-center mb-4">
<h3 id="selected-date-title" class="text-lg font-semibold text-gray-800">选择日期查看日志</h3>
<div class="flex items-center">
<input type="checkbox" id="select-all-logs" class="rounded text-primary focus:ring-primary h-4 w-4 mr-2">
<label for="select-all-logs" class="text-sm font-medium text-gray-700">全选日志</label>
</div>
</div>
<div id="logs-list" class="space-y-2 flex-1 overflow-y-auto">
<!-- 日志文件列表将通过JavaScript动态生成 -->
<div class="text-center text-gray-500 py-8">
<p>请选择左侧日期</p>
</div>
</div>
</div>
</div>
<!-- 右侧:日志内容 -->
<div class="lg:col-span-7">
<div class="bg-white rounded-lg shadow-sm p-4 h-full flex flex-col">
<div class="flex justify-between items-center mb-4">
<h3 id="current-log-title" class="text-lg font-semibold text-gray-800">日志内容</h3>
<div class="flex items-center space-x-2">
<select id="log-mode" class="border border-gray-300 rounded-md text-sm px-2 py-1">
<option value="latest" selected>最新的N条日志顶部</option>
<option value="oldest">最早的N条日志顶部</option>
</select>
<select id="log-lines" class="border border-gray-300 rounded-md text-sm px-2 py-1">
<option value="50">50行</option>
<option value="100" selected>100行</option>
<option value="200">200行</option>
<option value="500">500行</option>
<option value="1000">1000行</option>
<option value="2000">2000行</option>
</select>
</div>
</div>
<div class="bg-gray-900 text-gray-200 rounded-md p-4 flex-1 overflow-auto">
<pre id="log-content" class="text-sm whitespace-pre-wrap">请选择日志文件查看内容...</pre>
</div>
</div>
</div>
</div>
</div>
</main>
<!-- 自定义确认对话框 -->
<div id="confirm-modal" class="fixed inset-0 bg-black bg-opacity-50 hidden items-center justify-center z-50">
<div class="bg-white rounded-lg shadow-xl max-w-md w-full mx-4">
<div class="p-6">
<h3 class="text-lg font-semibold text-gray-800 mb-2" id="confirm-title">确认操作</h3>
<p class="text-gray-600 mb-6" id="confirm-message">确定要执行此操作吗?</p>
<div class="flex justify-end space-x-3">
<button type="button" id="confirm-cancel" class="px-4 py-2 border border-gray-300 rounded-md text-gray-700 hover:bg-gray-50 transition-colors">
取消
</button>
<button type="button" id="confirm-ok" class="px-4 py-2 bg-danger text-white rounded-md hover:bg-danger/90 transition-colors">
确定
</button>
</div>
</div>
</div>
</div>
<!-- JavaScript -->
<script>
// 全局变量
let selectedDate = null;
let currentLogFile = null;
let selectedFiles = new Set(); // 存储选中的日志文件路径
let dates = []; // 存储所有日期
// 自定义确认对话框
let confirmResolver = null;
function showConfirm(title, message) {
return new Promise((resolve) => {
confirmResolver = resolve;
const modal = document.getElementById('confirm-modal');
const titleEl = document.getElementById('confirm-title');
const messageEl = document.getElementById('confirm-message');
titleEl.textContent = title;
messageEl.textContent = message;
modal.classList.remove('hidden');
modal.classList.add('flex');
});
}
// 确认对话框事件监听
document.getElementById('confirm-cancel').addEventListener('click', () => {
const modal = document.getElementById('confirm-modal');
modal.classList.add('hidden');
modal.classList.remove('flex');
if (confirmResolver) {
confirmResolver(false);
confirmResolver = null;
}
});
document.getElementById('confirm-ok').addEventListener('click', () => {
const modal = document.getElementById('confirm-modal');
modal.classList.add('hidden');
modal.classList.remove('flex');
if (confirmResolver) {
confirmResolver(true);
confirmResolver = null;
}
});
// 工具函数
function formatBytes(bytes, decimals = 2) {
if (bytes === 0) return '0 Bytes';
const k = 1024;
const dm = decimals < 0 ? 0 : decimals;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i];
}
// 获取日期列表
async function fetchDates() {
try {
const response = await fetch('/monitoring/logs');
const data = await response.json();
if (data.success) {
dates = data.data.dates || [];
renderDatesList();
return true;
}
} catch (error) {
console.error('获取日期列表失败:', error);
document.getElementById('dates-list').innerHTML = '<div class="text-center text-red-500 py-8"><i class="fa fa-exclamation-circle text-xl mb-2"></i><p>获取日期列表失败</p></div>';
}
return false;
}
// 渲染日期列表
function renderDatesList() {
const datesList = document.getElementById('dates-list');
if (dates.length === 0) {
datesList.innerHTML = '<div class="text-center text-gray-500 py-8"><p>没有找到日志日期</p></div>';
return;
}
let html = '';
for (const date of dates) {
const isSelected = selectedDate === date.date;
html += `
<div class="flex items-center p-2 rounded-md border border-gray-200 cursor-pointer hover:bg-gray-100 transition-colors ${isSelected ? 'bg-primary/10 border-l-4 border-primary' : ''}">
<input type="checkbox" data-date="${date.date}" class="date-checkbox rounded text-primary focus:ring-primary h-4 w-4 mr-2">
<div class="flex-1" onclick="selectDate('${date.date}')">
<div class="font-medium text-gray-800">${date.date}</div>
<div class="text-xs text-gray-500">${date.log_count} 个日志文件</div>
</div>
<div class="text-xs text-gray-500">${new Date(date.last_modified * 1000).toLocaleString()}</div>
</div>
`;
}
datesList.innerHTML = html;
// 添加日期复选框事件监听
document.querySelectorAll('.date-checkbox').forEach(checkbox => {
checkbox.addEventListener('change', handleDateCheckboxChange);
});
}
// 选择日期
async function selectDate(date) {
selectedDate = date;
currentLogFile = null;
selectedFiles.clear();
updateSelectedCount();
// 更新日期列表样式
renderDatesList();
// 更新日志列表
await fetchLogsByDate(date);
// 清空日志内容
document.getElementById('current-log-title').textContent = '日志内容';
document.getElementById('log-content').textContent = '请选择日志文件查看内容...';
}
// 获取指定日期的日志文件
async function fetchLogsByDate(date) {
try {
const response = await fetch(`/monitoring/logs?date=${date}`);
const data = await response.json();
if (data.success) {
const logs = data.data.logs || [];
renderLogsList(logs, date);
return true;
}
} catch (error) {
console.error('获取日志文件列表失败:', error);
document.getElementById('logs-list').innerHTML = '<div class="text-center text-red-500 py-8"><i class="fa fa-exclamation-circle text-xl mb-2"></i><p>获取日志文件列表失败</p></div>';
}
return false;
}
// 渲染日志文件列表
function renderLogsList(logs, date) {
const logsList = document.getElementById('logs-list');
const selectedDateTitle = document.getElementById('selected-date-title');
selectedDateTitle.textContent = `${date} 日志文件 (${logs.length} 个)`;
if (logs.length === 0) {
logsList.innerHTML = '<div class="text-center text-gray-500 py-8"><p>该日期没有日志文件</p></div>';
return;
}
let html = '';
for (const log of logs) {
const isSelected = selectedFiles.has(log.relative_path);
// 将Windows风格的路径分隔符替换为Unix风格
const displayPath = log.relative_path.replace(/\\/g, '/');
html += `
<div class="flex items-center p-2 rounded-md border border-gray-200 hover:bg-gray-100 transition-colors">
<input type="checkbox" data-path="${log.relative_path}" class="log-checkbox rounded text-primary focus:ring-primary h-4 w-4 mr-2" ${isSelected ? 'checked' : ''}>
<div class="flex-1" onclick="viewLog('${displayPath}', '${log.name}')">
<div class="font-medium text-gray-800">${log.name}</div>
<div class="text-xs text-gray-500">${formatBytes(log.size)} · ${new Date(log.modified_at * 1000).toLocaleString()}</div>
</div>
<a href="/monitoring/logs/${log.relative_path}/download" target="_blank" class="text-primary hover:text-primary/80 mr-2" title="下载">
<i class="fa fa-download"></i>
</a>
<button type="button" onclick="deleteLog('${displayPath}', '${log.name}')" class="text-danger hover:text-danger/80" title="删除">
<i class="fa fa-trash"></i>
</button>
</div>
`;
}
logsList.innerHTML = html;
// 添加日志复选框事件监听
document.querySelectorAll('.log-checkbox').forEach(checkbox => {
checkbox.addEventListener('change', handleLogCheckboxChange);
});
}
// 查看日志内容
async function viewLog(filePath, fileName) {
currentLogFile = filePath;
// 更新标题
document.getElementById('current-log-title').textContent = `日志内容 - ${fileName}`;
try {
const entries = document.getElementById('log-lines').value;
const mode = document.getElementById('log-mode').value;
// 将Windows风格的路径分隔符替换为Unix风格
const normalizedPath = filePath.replace(/\\/g, '/');
const parts = normalizedPath.split('/');
const date = parts[0];
const logName = parts.slice(1).join('/');
const response = await fetch(`/monitoring/logs/${date}/${logName}?entries=${entries}&mode=${mode}`);
const data = await response.json();
if (data.success) {
document.getElementById('log-content').textContent = data.data.content;
// 根据显示模式滚动
const logContent = document.getElementById('log-content');
logContent.scrollTop = 0;
} else {
document.getElementById('log-content').textContent = `获取日志内容失败: ${data.message}`;
}
} catch (error) {
console.error('获取日志内容失败:', error);
document.getElementById('log-content').textContent = `获取日志内容失败: ${error.message}`;
}
}
// 处理日期复选框变化
async function handleDateCheckboxChange(e) {
const date = e.target.dataset.date;
const isChecked = e.target.checked;
if (isChecked) {
// 选中该日期下的所有日志文件
await fetchLogsByDate(date);
// 勾选该日期下的所有日志文件
document.querySelectorAll('.log-checkbox').forEach(checkbox => {
const path = checkbox.dataset.path;
if (path && path.startsWith(date)) {
checkbox.checked = true;
selectedFiles.add(path);
}
});
} else {
// 取消选中该日期下的所有日志文件
document.querySelectorAll('.log-checkbox').forEach(checkbox => {
const path = checkbox.dataset.path;
if (path && path.startsWith(date)) {
checkbox.checked = false;
selectedFiles.delete(path);
}
});
}
updateSelectedCount();
}
// 处理日志复选框变化
function handleLogCheckboxChange(e) {
const filePath = e.target.dataset.path;
const isChecked = e.target.checked;
if (isChecked) {
selectedFiles.add(filePath);
} else {
selectedFiles.delete(filePath);
}
updateSelectedCount();
}
// 更新选中计数
function updateSelectedCount() {
const count = selectedFiles.size;
// 更新批量下载的计数
document.getElementById('selected-count-download').textContent = count;
// 更新批量删除的计数
document.getElementById('selected-count-delete').textContent = count;
// 更新按钮状态
document.getElementById('batch-download-btn').disabled = count === 0;
document.getElementById('batch-delete-btn').disabled = count === 0;
}
// 删除单个日志文件
async function deleteLog(filePath, fileName) {
const confirmed = await showConfirm('删除确认', `确定要删除日志文件 "${fileName}" 吗?此操作不可恢复。`);
if (!confirmed) {
return;
}
try {
const response = await fetch(`/monitoring/logs/${filePath}`, {
method: 'DELETE'
});
const data = await response.json();
if (data.success) {
// 更新日志列表
await fetchLogsByDate(selectedDate);
// 清空日志内容(如果当前显示的是被删除的日志)
if (currentLogFile === filePath) {
document.getElementById('current-log-title').textContent = '日志内容';
document.getElementById('log-content').textContent = '请选择日志文件查看内容...';
currentLogFile = null;
}
// 更新选中文件集合
selectedFiles.delete(filePath);
updateSelectedCount();
// 显示成功提示
alert(`日志文件 "${fileName}" 删除成功`);
} else {
alert(`删除失败: ${data.message}`);
}
} catch (error) {
console.error('删除日志失败:', error);
alert(`删除失败: ${error.message}`);
}
}
// 批量删除日志文件
async function batchDelete() {
if (selectedFiles.size === 0) {
alert('请先选择日志文件');
return;
}
const confirmed = await showConfirm('批量删除确认', `确定要删除选中的 ${selectedFiles.size} 个日志文件吗?此操作不可恢复。`);
if (!confirmed) {
return;
}
try {
const response = await fetch('/monitoring/logs/batch/delete', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
files: Array.from(selectedFiles)
})
});
const data = await response.json();
if (data.success) {
// 更新日志列表
await fetchLogsByDate(selectedDate);
// 清空当前日志内容
document.getElementById('current-log-title').textContent = '日志内容';
document.getElementById('log-content').textContent = '请选择日志文件查看内容...';
currentLogFile = null;
// 清空选中文件集合
selectedFiles.clear();
updateSelectedCount();
// 显示成功提示
alert(`批量删除完成。成功删除 ${data.data.deleted} 个文件,失败 ${data.data.failed} 个文件。`);
} else {
alert(`批量删除失败: ${data.message}`);
}
} catch (error) {
console.error('批量删除失败:', error);
alert(`批量删除失败: ${error.message}`);
}
}
// 全选/取消全选日期
function toggleSelectAllDates() {
const selectAllCheckbox = document.getElementById('select-all-dates');
const isChecked = selectAllCheckbox.checked;
document.querySelectorAll('.date-checkbox').forEach(checkbox => {
checkbox.checked = isChecked;
// 触发change事件,调用handleDateCheckboxChange处理日志文件联动
checkbox.dispatchEvent(new Event('change'));
});
}
// 全选/取消全选日志
function toggleSelectAllLogs() {
const selectAllCheckbox = document.getElementById('select-all-logs');
const isChecked = selectAllCheckbox.checked;
document.querySelectorAll('.log-checkbox').forEach(checkbox => {
checkbox.checked = isChecked;
checkbox.dispatchEvent(new Event('change'));
});
}
// 批量下载日志
async function batchDownload() {
if (selectedFiles.size === 0) {
alert('请先选择日志文件');
return;
}
try {
const response = await fetch('/monitoring/logs/download', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
files: Array.from(selectedFiles)
})
});
if (response.ok) {
// 创建下载链接
const blob = await response.blob();
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `logs_${new Date().toISOString().slice(0, 10)}_batch.zip`;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
} else {
const data = await response.json();
alert(`批量下载失败: ${data.message}`);
}
} catch (error) {
console.error('批量下载失败:', error);
alert(`批量下载失败: ${error.message}`);
}
}
// 刷新所有数据
async function refreshAll() {
await fetchDates();
if (selectedDate) {
await fetchLogsByDate(selectedDate);
}
// 更新最后更新时间
const now = new Date();
document.getElementById('last-updated').textContent = now.toLocaleTimeString();
}
// 初始化
async function init() {
// 初始加载数据
await refreshAll();
// 事件监听
document.getElementById('refresh-btn').addEventListener('click', refreshAll);
document.getElementById('select-all-dates').addEventListener('change', toggleSelectAllDates);
document.getElementById('select-all-logs').addEventListener('change', toggleSelectAllLogs);
// 日志行数或显示模式变化事件
document.getElementById('log-lines').addEventListener('change', () => {
if (currentLogFile) {
// 直接调用viewLog使用当前的filePath和fileName
viewLog(currentLogFile, document.getElementById('current-log-title').textContent.replace('日志内容 - ', ''));
}
});
// 显示模式变化事件
document.getElementById('log-mode').addEventListener('change', () => {
if (currentLogFile) {
// 直接调用viewLog使用当前的filePath和fileName
viewLog(currentLogFile, document.getElementById('current-log-title').textContent.replace('日志内容 - ', ''));
}
});
}
// 页面加载完成后初始化
document.addEventListener('DOMContentLoaded', init);
</script>
</body>
</html>
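
上面 log.html 的脚本按固定约定调用后端:GET /monitoring/logs?date=… 返回 {success, data: {logs}},GET /monitoring/logs/<日期>/<文件名>?entries=…&mode=… 返回 {success, data: {content}},POST /monitoring/logs/batch/delete 返回 {success, data: {deleted, failed}}。下面是按这些调用反推的最小 FastAPI 示意(假设性的写法,并非本次提交中的实际后端实现;LOGS_DIR、路由组织与函数名均为示例,仅字段名取自前端解析逻辑,实际以 src/ 下的监控路由为准):

# 最小示意:log.html 所依赖的 /monitoring/logs 接口形状(假设性实现)
from pathlib import Path
from typing import List

from fastapi import APIRouter
from pydantic import BaseModel

LOGS_DIR = Path("logs")  # 假设:日志根目录,按日期分子目录
router = APIRouter(prefix="/monitoring")


class BatchFiles(BaseModel):
    files: List[str]  # 形如 "2026-01-12/xxx_success.log" 的相对路径


@router.get("/logs")
async def list_logs(date: str):
    """按日期列出日志文件,返回前端需要的 name/relative_path/size/modified_at 字段"""
    day_dir = LOGS_DIR / date
    logs = []
    if day_dir.is_dir():
        for p in sorted(day_dir.glob("*.log")):
            st = p.stat()
            logs.append({
                "name": p.name,
                "relative_path": f"{date}/{p.name}",
                "size": st.st_size,          # 字节数,前端用 formatBytes 格式化
                "modified_at": st.st_mtime,  # 秒级时间戳,前端乘以 1000 显示
            })
    return {"success": True, "data": {"logs": logs}}


@router.get("/logs/{date}/{name}")
async def read_log(date: str, name: str, entries: int = 100, mode: str = "tail"):
    """按 entries/mode 截取日志内容,mode 取值沿用前端下拉框(此处假设为 head/tail)"""
    lines = (LOGS_DIR / date / name).read_text(encoding="utf-8").splitlines()
    picked = lines[-entries:] if mode == "tail" else lines[:entries]
    return {"success": True, "data": {"content": "\n".join(picked)}}


@router.post("/logs/batch/delete")
async def batch_delete(body: BatchFiles):
    """批量删除,返回 deleted/failed 计数;只允许删除 LOGS_DIR 之内的文件"""
    deleted = failed = 0
    root = LOGS_DIR.resolve()
    for rel in body.files:
        target = (LOGS_DIR / rel).resolve()
        if root in target.parents and target.is_file():
            target.unlink()
            deleted += 1
        else:
            failed += 1
    return {"success": True, "data": {"deleted": deleted, "failed": failed}}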

474
request/static/status.html Normal file

@@ -0,0 +1,474 @@
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>系统状态 - X-Request</title>
<!-- 引入Tailwind CSS (离线版本) -->
<script src="vendor/tailwind.min.js"></script>
<!-- 引入内联 SVG 图标系统 (完全离线) -->
<script src="vendor/icons.js"></script>
<style>
.inline-icon, .inline-emoji {
display: inline-block;
vertical-align: middle;
}
.inline-icon svg {
width: 1em;
height: 1em;
fill: currentColor;
}
.inline-emoji {
font-size: 1em;
line-height: 1;
}
@keyframes spin {
from { transform: rotate(0deg); }
to { transform: rotate(360deg); }
}
.inline-icon[data-spin="true"], .inline-emoji[data-spin="true"] {
animation: spin 1s linear infinite;
}
</style>
<!-- 配置Tailwind -->
<script>
tailwind.config = {
theme: {
extend: {
colors: {
primary: '#3b82f6',
secondary: '#8b5cf6',
success: '#10b981',
warning: '#f59e0b',
danger: '#ef4444',
},
},
}
}
</script>
<style type="text/tailwindcss">
@layer utilities {
.content-auto {
content-visibility: auto;
}
}
/* 全屏样式 */
html, body {
height: 100vh;
margin: 0;
padding: 0;
}
body {
display: flex;
flex-direction: column;
}
main {
flex: 1;
padding: 1rem;
}
.status-card {
transition: all 0.3s ease;
}
.status-card:hover {
transform: translateY(-2px);
box-shadow: 0 10px 25px rgba(0, 0, 0, 0.1);
}
.pulse-animation {
animation: pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
}
@keyframes pulse {
0%, 100% {
opacity: 1;
}
50% {
opacity: .5;
}
}
</style>
</head>
<body class="bg-gray-50 min-h-screen">
<!-- 顶部导航栏 -->
<nav class="bg-white shadow-md h-16">
<div class="w-full mx-auto px-4 sm:px-6 lg:px-8">
<div class="flex justify-between h-16 items-center">
<div class="flex items-center">
<div class="flex-shrink-0 flex items-center">
<span class="text-3xl font-bold text-primary">X</span>
<span class="ml-2 text-xl font-semibold text-gray-800">X-Request 管理系统</span>
</div>
<div class="ml-6 flex items-center space-x-4">
<a href="/" class="px-3 py-2 rounded-md text-sm font-medium text-gray-500 hover:text-primary hover:bg-primary/10 hover:border-b-2 hover:border-primary transition-colors">
<i class="fa fa-home mr-1"></i> 首页
</a>
<a href="/log.html" class="px-3 py-2 rounded-md text-sm font-medium text-gray-500 hover:text-primary hover:bg-primary/10 hover:border-b-2 hover:border-primary transition-colors">
<i class="fa fa-file-text mr-1"></i> 日志管理
</a>
<a href="/doc.html" class="px-3 py-2 rounded-md text-sm font-medium text-gray-500 hover:text-primary hover:bg-primary/10 hover:border-b-2 hover:border-primary transition-colors">
<i class="fa fa-book mr-1"></i> 接口文档
</a>
<a href="/status.html" class="px-3 py-2 rounded-md text-sm font-medium text-primary bg-primary/10 border-b-2 border-primary">
<i class="fa fa-heartbeat mr-1"></i> 系统状态
</a>
</div>
</div>
<div class="flex items-center space-x-4">
<button id="refresh-btn" class="px-4 py-2 bg-primary text-white rounded-md hover:bg-primary/90 transition-colors flex items-center">
<i class="fa fa-refresh mr-2"></i>
<span>刷新</span>
</button>
<a href="index.html" class="px-4 py-2 bg-gray-200 text-gray-700 rounded-md hover:bg-gray-300 transition-colors flex items-center">
<i class="fa fa-home mr-2"></i>
<span>返回首页</span>
</a>
</div>
</div>
</div>
</nav>
<!-- 主要内容 -->
<main class="w-full px-4 sm:px-6 lg:px-8 py-4">
<div class="max-w-6xl mx-auto">
<!-- 健康检查卡片 -->
<div class="bg-white rounded-lg shadow-md p-6 mb-6 status-card">
<div class="flex items-center justify-between mb-4">
<h2 class="text-2xl font-bold text-gray-800 flex items-center">
<i class="fa fa-heartbeat text-primary mr-3"></i>
健康检查
</h2>
<div id="health-status" class="flex items-center">
<span class="px-3 py-1 rounded-full text-sm font-medium bg-gray-200 text-gray-700">
<i class="fa fa-spinner fa-spin mr-2"></i>
检查中...
</span>
</div>
</div>
<div id="health-content" class="space-y-3">
<div class="flex items-center justify-center py-8">
<i class="fa fa-spinner fa-spin text-3xl text-primary"></i>
<span class="ml-3 text-gray-600">正在加载健康检查信息...</span>
</div>
</div>
</div>
<!-- 应用信息卡片 -->
<div class="bg-white rounded-lg shadow-md p-6 mb-6 status-card">
<div class="flex items-center justify-between mb-4">
<h2 class="text-2xl font-bold text-gray-800 flex items-center">
<i class="fa fa-info-circle text-primary mr-3"></i>
应用信息
</h2>
<div class="text-sm text-gray-500">
<i class="fa fa-clock-o mr-1"></i>
<span id="last-updated">--</span>
</div>
</div>
<div id="info-content" class="space-y-3">
<div class="flex items-center justify-center py-8">
<i class="fa fa-spinner fa-spin text-3xl text-primary"></i>
<span class="ml-3 text-gray-600">正在加载应用信息...</span>
</div>
</div>
</div>
<!-- 系统状态卡片 -->
<div class="bg-white rounded-lg shadow-md p-6 status-card">
<div class="flex items-center justify-between mb-4">
<h2 class="text-2xl font-bold text-gray-800 flex items-center">
<i class="fa fa-server text-primary mr-3"></i>
系统状态
</h2>
</div>
<div id="system-content" class="space-y-3">
<div class="flex items-center justify-center py-8">
<i class="fa fa-spinner fa-spin text-3xl text-primary"></i>
<span class="ml-3 text-gray-600">正在加载系统状态...</span>
</div>
</div>
</div>
</div>
</main>
<!-- 页脚 -->
<footer class="bg-white border-t border-gray-200 py-4 mt-6">
<div class="w-full mx-auto px-4 sm:px-6 lg:px-8 text-center text-sm text-gray-500">
<p>X-Request © 2025</p>
</div>
</footer>
<script>
// 工具函数
function formatTimestamp(timestamp) {
if (!timestamp) return '--';
const date = new Date(timestamp * 1000);
return date.toLocaleString('zh-CN');
}
function formatDateTime(date) {
return date.toLocaleString('zh-CN');
}
// 获取健康检查信息
async function fetchHealth() {
try {
const response = await fetch('/health');
const data = await response.json();
const healthStatus = document.getElementById('health-status');
const healthContent = document.getElementById('health-content');
if (data.status === 1 && data.response) {
// 健康状态
healthStatus.innerHTML = `
<span class="px-3 py-1 rounded-full text-sm font-medium bg-success/20 text-success border border-success/30">
<i class="fa fa-check-circle mr-2"></i>
健康
</span>
`;
// 健康信息内容
healthContent.innerHTML = `
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
<div class="bg-success/5 border border-success/20 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-check-circle text-success text-xl mr-2"></i>
<span class="font-semibold text-gray-800">服务状态</span>
</div>
<p class="text-2xl font-bold text-success">${data.response.status || 'healthy'}</p>
</div>
<div class="bg-primary/5 border border-primary/20 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-cog text-primary text-xl mr-2"></i>
<span class="font-semibold text-gray-800">服务名称</span>
</div>
<p class="text-lg font-semibold text-gray-700">${data.response.service || 'Unknown'}</p>
</div>
</div>
<div class="mt-4 pt-4 border-t border-gray-200">
<div class="text-sm text-gray-500">
<i class="fa fa-clock-o mr-1"></i>
检查时间: ${data.time || '--'}
</div>
</div>
`;
} else {
// 不健康状态
healthStatus.innerHTML = `
<span class="px-3 py-1 rounded-full text-sm font-medium bg-danger/20 text-danger border border-danger/30">
<i class="fa fa-exclamation-circle mr-2"></i>
异常
</span>
`;
healthContent.innerHTML = `
<div class="bg-danger/5 border border-danger/20 rounded-lg p-4">
<p class="text-danger font-semibold">健康检查失败</p>
<p class="text-sm text-gray-600 mt-2">${data.response?.error || '未知错误'}</p>
</div>
`;
}
} catch (error) {
const healthStatus = document.getElementById('health-status');
const healthContent = document.getElementById('health-content');
healthStatus.innerHTML = `
<span class="px-3 py-1 rounded-full text-sm font-medium bg-danger/20 text-danger border border-danger/30">
<i class="fa fa-times-circle mr-2"></i>
错误
</span>
`;
healthContent.innerHTML = `
<div class="bg-danger/5 border border-danger/20 rounded-lg p-4">
<p class="text-danger font-semibold">无法获取健康检查信息</p>
<p class="text-sm text-gray-600 mt-2">${error.message}</p>
</div>
`;
}
}
// 获取应用信息
async function fetchInfo() {
try {
const response = await fetch('/info');
const data = await response.json();
const infoContent = document.getElementById('info-content');
if (data.status === 1 && data.response) {
const info = data.response;
infoContent.innerHTML = `
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
<div class="bg-primary/5 border border-primary/20 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-tag text-primary text-xl mr-2"></i>
<span class="font-semibold text-gray-800">应用名称</span>
</div>
<p class="text-lg font-semibold text-gray-700">${info.app_name || 'Unknown'}</p>
</div>
<div class="bg-secondary/5 border border-secondary/20 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-code-fork text-secondary text-xl mr-2"></i>
<span class="font-semibold text-gray-800">版本号</span>
</div>
<p class="text-lg font-semibold text-gray-700">${info.version || 'Unknown'}</p>
</div>
<div class="bg-${info.debug ? 'warning' : 'success'}/5 border border-${info.debug ? 'warning' : 'success'}/20 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-${info.debug ? 'bug' : 'shield'} text-${info.debug ? 'warning' : 'success'} text-xl mr-2"></i>
<span class="font-semibold text-gray-800">调试模式</span>
</div>
<p class="text-lg font-semibold text-${info.debug ? 'warning' : 'success'}">${info.debug ? '已启用' : '已禁用'}</p>
</div>
<div class="bg-gray-50 border border-gray-200 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-server text-gray-600 text-xl mr-2"></i>
<span class="font-semibold text-gray-800">主机地址</span>
</div>
<p class="text-lg font-semibold text-gray-700">${info.host || 'Unknown'}</p>
</div>
<div class="bg-gray-50 border border-gray-200 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-plug text-gray-600 text-xl mr-2"></i>
<span class="font-semibold text-gray-800">端口号</span>
</div>
<p class="text-lg font-semibold text-gray-700">${info.port || 'Unknown'}</p>
</div>
<div class="bg-gray-50 border border-gray-200 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-link text-gray-600 text-xl mr-2"></i>
<span class="font-semibold text-gray-800">访问地址</span>
</div>
<p class="text-lg font-semibold text-gray-700">http://${info.host === '0.0.0.0' ? 'localhost' : info.host}:${info.port || 'Unknown'}</p>
</div>
</div>
<div class="mt-4 pt-4 border-t border-gray-200">
<div class="text-sm text-gray-500">
<i class="fa fa-clock-o mr-1"></i>
更新时间: ${data.time || '--'}
</div>
</div>
`;
} else {
infoContent.innerHTML = `
<div class="bg-danger/5 border border-danger/20 rounded-lg p-4">
<p class="text-danger font-semibold">无法获取应用信息</p>
<p class="text-sm text-gray-600 mt-2">${data.response?.error || '未知错误'}</p>
</div>
`;
}
} catch (error) {
const infoContent = document.getElementById('info-content');
infoContent.innerHTML = `
<div class="bg-danger/5 border border-danger/20 rounded-lg p-4">
<p class="text-danger font-semibold">无法获取应用信息</p>
<p class="text-sm text-gray-600 mt-2">${error.message}</p>
</div>
`;
}
}
// 获取系统状态
async function fetchSystemStatus() {
try {
const response = await fetch('/monitoring/status');
const data = await response.json();
const systemContent = document.getElementById('system-content');
if (data.success && data.data && data.data.system) {
const system = data.data.system;
systemContent.innerHTML = `
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
<div class="bg-gray-50 border border-gray-200 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-desktop text-gray-600 text-xl mr-2"></i>
<span class="font-semibold text-gray-800">主机名</span>
</div>
<p class="text-lg font-semibold text-gray-700">${system.hostname || 'Unknown'}</p>
</div>
<div class="bg-gray-50 border border-gray-200 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-linux text-gray-600 text-xl mr-2"></i>
<span class="font-semibold text-gray-800">系统类型</span>
</div>
<p class="text-lg font-semibold text-gray-700">${system.system_type || 'Unknown'}</p>
</div>
<div class="bg-gray-50 border border-gray-200 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-python text-gray-600 text-xl mr-2"></i>
<span class="font-semibold text-gray-800">Python 版本</span>
</div>
<p class="text-lg font-semibold text-gray-700">${system.python_version || 'Unknown'}</p>
</div>
<div class="bg-gray-50 border border-gray-200 rounded-lg p-4">
<div class="flex items-center mb-2">
<i class="fa fa-clock-o text-gray-600 text-xl mr-2"></i>
<span class="font-semibold text-gray-800">时间戳</span>
</div>
<p class="text-lg font-semibold text-gray-700">${formatTimestamp(system.timestamp)}</p>
</div>
</div>
`;
} else {
systemContent.innerHTML = `
<div class="bg-warning/5 border border-warning/20 rounded-lg p-4">
<p class="text-warning font-semibold">系统状态信息不可用</p>
</div>
`;
}
} catch (error) {
const systemContent = document.getElementById('system-content');
systemContent.innerHTML = `
<div class="bg-warning/5 border border-warning/20 rounded-lg p-4">
<p class="text-warning font-semibold">无法获取系统状态</p>
<p class="text-sm text-gray-600 mt-2">${error.message}</p>
</div>
`;
}
}
// 刷新所有信息
async function refreshAll() {
const lastUpdated = document.getElementById('last-updated');
lastUpdated.textContent = formatDateTime(new Date());
await Promise.all([
fetchHealth(),
fetchInfo(),
fetchSystemStatus()
]);
}
// 初始化
async function init() {
// 初始加载数据
await refreshAll();
// 刷新按钮事件
document.getElementById('refresh-btn').addEventListener('click', async () => {
const btn = document.getElementById('refresh-btn');
const icon = btn.querySelector('i');
icon.classList.add('fa-spin');
btn.disabled = true;
await refreshAll();
icon.classList.remove('fa-spin');
btn.disabled = false;
});
// 自动刷新(每30秒)
setInterval(refreshAll, 30000);
}
// 页面加载完成后初始化
document.addEventListener('DOMContentLoaded', init);
</script>
</body>
</html>
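
status.html 依赖两种响应包裹:/health 与 /info 返回 {status, time, response: {…}},/monitoring/status 返回 {success, data: {system: {…}}}。下面按页面解析逻辑给出一个最小示意(假设性的 FastAPI 写法,并非本次提交中的实际实现;字段名取自上面的前端代码):

# 最小示意:status.html 解析的响应结构(假设性实现)
import platform
import socket
import sys
import time

from fastapi import APIRouter

router = APIRouter()


@router.get("/health")
async def health():
    # 页面读取 response.status / response.service 与 time 字段
    return {
        "status": 1,
        "time": time.strftime("%Y-%m-%d %H:%M:%S"),
        "response": {"status": "healthy", "service": "X-Request"},
    }


@router.get("/monitoring/status")
async def monitoring_status():
    # 页面读取 data.system 下的 hostname / system_type / python_version / timestamp(秒)
    return {
        "success": True,
        "data": {
            "system": {
                "hostname": socket.gethostname(),
                "system_type": platform.system(),
                "python_version": sys.version.split()[0],
                "timestamp": time.time(),
            }
        },
    }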

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Binary file not shown.

140
request/static/vendor/icons.js vendored Normal file

@@ -0,0 +1,140 @@
/**
* Emoji Icons - Beautiful Emoji Icons (Offline)
* Modern, beautiful emoji icons, completely offline
* Compatible with existing fa-* class names
*/
const Icons = {
// 文档相关图标
'document-text': '📄',
'document': '📄',
'file-text': '📝',
'book-open': '📖',
'book': '📖',
// 健康/状态相关图标
'heart-pulse': '💓',
'heart': '❤️',
'check-circle': '✅',
'x-circle': '❌',
'exclamation-triangle': '⚠️',
'information-circle': 'ℹ️',
// 导航相关图标
'home': '🏠',
'arrow-right': '➡️',
'arrow-left': '⬅️',
// 操作相关图标
'arrow-downTray': '⬇️',
'download': '⬇️',
'trash': '🗑️',
'folder': '📁',
'magnifying-glass': '🔍',
'eye': '👁️',
'check-square': '☑️',
'square': '⬜',
'ellipsis-horizontal': '⋯',
// 系统相关图标
'server': '🖥️',
'chart-bar': '📊',
'calendar': '📅',
'clock': '🕐',
'user': '👤',
'cpu-chip': '💾',
'circle-stack': '🗃️',
'globe-alt': '🌐',
// 设置相关图标
'cog-6-tooth': '⚙️',
'bell': '🔔',
'inbox': '📥',
// 新增:文件操作
'folder-plus': '📁➕',
'document-plus': '📝➕',
'arrow-path': '🔄',
};
// 创建图标元素的辅助函数
function createIcon(name, className = '') {
const emoji = Icons[name];
if (!emoji) {
console.warn(`Icon "${name}" not found`);
return '';
}
return `<span class="inline-emoji ${className}" data-icon="${name}">${emoji}</span>`;
}
// 初始化:替换所有 <i class="fa fa-"> 为 Emoji 图标
function initIcons() {
// 映射旧图标名到新图标名
const iconMapping = {
'fa-file-text-o': 'document-text',
'fa-file-text': 'document-text',
'fa-book': 'book-open',
'fa-book-o': 'book-open',
'fa-heartbeat': 'heart-pulse',
'fa-home': 'home',
'fa-arrow-right': 'arrow-right',
'fa-download': 'download',
'fa-trash': 'trash',
'fa-check-circle': 'check-circle',
'fa-times-circle': 'x-circle',
'fa-info-circle': 'information-circle',
'fa-exclamation-circle': 'exclamation-triangle',
'fa-refresh': 'arrow-path',
'fa-server': 'server',
'fa-clock-o': 'clock',
'fa-eye': 'eye',
};
// 查找所有 Font Awesome 图标
document.querySelectorAll('i.fa').forEach(icon => {
const classes = Array.from(icon.classList);
const iconName = classes.find(cls => cls.startsWith('fa-') && cls !== 'fa');
if (iconName) {
// 映射到新图标名
const mappedName = iconMapping[iconName] || iconName.replace('fa-', '');
const emoji = Icons[mappedName];
if (emoji) {
// 保留原有的类名(除了 fa 和 fa-*
const otherClasses = classes.filter(cls => !cls.startsWith('fa'));
const newElement = document.createElement('span');
newElement.className = `inline-emoji ${otherClasses.join(' ')}`;
newElement.textContent = emoji;
newElement.style.display = 'inline-block';
newElement.style.verticalAlign = 'middle';
// 处理旋转动画
if (classes.includes('fa-spin')) {
newElement.setAttribute('data-spin', 'true');
}
// 复制样式
const computedStyle = window.getComputedStyle(icon);
if (computedStyle.fontSize) {
newElement.style.fontSize = computedStyle.fontSize;
}
icon.parentNode.replaceChild(newElement, icon);
}
}
});
}
// 页面加载完成后初始化
if (document.readyState === 'loading') {
document.addEventListener('DOMContentLoaded', initIcons);
} else {
initIcons();
}
// 添加便捷方法到全局
window.Icons = {
create: createIcon,
render: (name, className = '') => createIcon(name, className),
};

File diff suppressed because one or more lines are too long

3
request/static/vendor/swagger-ui.css vendored Normal file

File diff suppressed because one or more lines are too long

65
request/static/vendor/tailwind.min.js vendored Normal file

File diff suppressed because one or more lines are too long

83
request/stop.sh Normal file

@@ -0,0 +1,83 @@
#!/usr/bin/env bash
set -euo pipefail
echo "🛑 停止 X-Request (FastAPI)"
# 永远从脚本所在目录运行
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
PID_FILE="logs/xrequest.pid"
# 读取 .env 文件中的配置
PORT="8000" # 默认端口
# 解析 .env 文件中的 PORT
if [ -f ".env" ]; then
# 使用 grep 和 sed 来提取 PORT忽略注释
PORT=$(grep -v '^#' .env | grep '^PORT=' | head -1 | cut -d= -f2 | tr -d '"' || echo "8000")
fi
echo "📋 使用配置: PORT=$PORT"
kill_pid() {
local pid="$1"
if [ -z "$pid" ]; then
return 1
fi
# Windows Git Bash: 优先 taskkill对 python.exe 更稳)
if command -v taskkill >/dev/null 2>&1; then
taskkill //F //PID "$pid" >/dev/null 2>&1 || true
return 0
fi
# Linux/macOS
kill "$pid" >/dev/null 2>&1 || true
sleep 0.3
kill -9 "$pid" >/dev/null 2>&1 || true
return 0
}
if [ -f "$PID_FILE" ]; then
PID="$(cat "$PID_FILE" 2>/dev/null || true)"
if [ -n "$PID" ] && kill -0 "$PID" >/dev/null 2>&1; then
echo "🔪 终止进程 pid=$PID ..."
kill_pid "$PID"
rm -f "$PID_FILE" || true
echo "✅ 已停止"
exit 0
fi
# pid 文件存在但进程不存在:清理
rm -f "$PID_FILE" || true
fi
echo " 未找到有效 PID 文件,尝试按端口 $PORT 查找并停止..."
PIDS_BY_PORT=""
if command -v netstat >/dev/null 2>&1; then
# Windows Git Bash (netstat -ano)
PIDS_BY_PORT="$(netstat -ano 2>/dev/null | grep -E "[:.]$PORT[[:space:]]" | grep LISTENING | awk '{print $NF}' | sort -u | tr '\n' ' ' || true)"
fi
if [ -z "$PIDS_BY_PORT" ] && command -v lsof >/dev/null 2>&1; then
PIDS_BY_PORT="$(lsof -ti tcp:$PORT 2>/dev/null | sort -u | tr '\n' ' ' || true)"
fi
if [ -z "$PIDS_BY_PORT" ] && command -v ss >/dev/null 2>&1; then
PIDS_BY_PORT="$(ss -ltnp 2>/dev/null | grep ':$PORT' | sed -n 's/.*pid=\([0-9]\+\).*/\1/p' | sort -u | tr '\n' ' ' || true)"
fi
if [ -z "$PIDS_BY_PORT" ]; then
echo "✅ 未发现 $PORT 端口监听进程(无需停止)"
exit 0
fi
for p in $PIDS_BY_PORT; do
echo "🔪 终止进程 pid=$p (by port $PORT)..."
kill_pid "$p"
done
echo "✅ 已停止by port $PORT"

277
request/view_logs.sh Normal file

@@ -0,0 +1,277 @@
#!/bin/bash
# 永远从脚本所在目录运行(避免在其它目录执行时找不到日志目录)
PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
LOGS_DIR="${LOGS_DIR:-$PROJECT_ROOT/logs}"
echo "📋 X-Request 高级日志查看工具"
echo "============================"
TODAY=$(date +%Y-%m-%d)
TODAY_DIR="$LOGS_DIR/$TODAY"
# 检查日志目录是否存在
if [ ! -d "$LOGS_DIR" ]; then
echo "❌ 日志目录不存在: $LOGS_DIR"
echo "💡 请先启动应用生成日志"
exit 1
fi
echo "📁 日志目录: $LOGS_DIR"
echo "📅 今天的日志: $TODAY_DIR"
echo ""
# 显示可用日期目录
echo "📅 可用的日志日期:"
echo "=================="
ls -1 "$LOGS_DIR" | grep -E "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" | sort -r
echo ""
# 显示今天的日志文件统计
if [ -d "$TODAY_DIR" ]; then
echo "📄 今天的日志文件:"
echo "=================="
echo "成功日志:"
ls -lh "$TODAY_DIR"/*_success.log 2>/dev/null | awk '{print " " $9 " (" $5 ")"}' || echo " 无成功日志"
echo ""
echo "失败日志:"
ls -lh "$TODAY_DIR"/*_error.log 2>/dev/null | awk '{print " " $9 " (" $5 ")"}' || echo " 无失败日志"
echo ""
else
echo "⚠️ 今天还没有日志文件"
echo ""
fi
# 主循环函数
show_menu_and_process() {
while true; do
echo ""
echo "请选择操作:"
echo "1) 实时监控成功日志"
echo "2) 实时监控失败日志"
echo "3) 搜索今天的日志 (按关键词)"
echo "4) 选择特定日期的日志"
echo "5) 查看日志统计"
echo "6) 清理旧日志 (7天前)"
echo "0) 退出"
echo ""
read -p "请输入选项 (0-6): " choice
case $choice in
1)
if [ ! -d "$TODAY_DIR" ]; then
echo "❌ 今天的日志目录不存在"
continue
fi
# 获取可用路由列表
echo "📋 请选择要监控的路由:"
echo "======================"
success_routes=$(find "$TODAY_DIR" -name "*_success.log" -exec basename {} _success.log \; 2>/dev/null | sort | uniq)
if [ -z "$success_routes" ]; then
echo "❌ 今天没有成功日志的路由"
continue
fi
echo "$success_routes" | nl
echo ""
read -p "请输入路由编号: " route_num
selected_route=$(echo "$success_routes" | sed -n "${route_num}p")
if [ -n "$selected_route" ] && [ -f "$TODAY_DIR/${selected_route}_success.log" ]; then
echo "🔄 实时监控 $selected_route 路由成功日志"
echo "========================================"
echo "💡 提示:监控将持续运行,输入 'x' 并按回车可退出监控"
echo ""
# 在后台启动tail命令
tail -f "$TODAY_DIR/${selected_route}_success.log" &
TAIL_PID=$!
# 等待用户输入x退出
while true; do
read -p "" input
if [ "$input" = "x" ] || [ "$input" = "X" ]; then
kill $TAIL_PID 2>/dev/null
echo ""
echo "✅ 已停止实时监控"
break
fi
done
else
echo "❌ 无效的路由选择"
fi
;;
2)
if [ ! -d "$TODAY_DIR" ]; then
echo "❌ 今天的日志目录不存在"
continue
fi
# 获取可用路由列表
echo "📋 请选择要监控的路由:"
echo "======================"
error_routes=$(find "$TODAY_DIR" -name "*_error.log" -exec basename {} _error.log \; 2>/dev/null | sort | uniq)
if [ -z "$error_routes" ]; then
echo "❌ 今天没有失败日志的路由"
continue
fi
echo "$error_routes" | nl
echo ""
read -p "请输入路由编号: " route_num
selected_route=$(echo "$error_routes" | sed -n "${route_num}p")
if [ -n "$selected_route" ] && [ -f "$TODAY_DIR/${selected_route}_error.log" ]; then
echo "🔄 实时监控 $selected_route 路由失败日志"
echo "========================================"
echo "💡 提示:监控将持续运行,输入 'x' 并按回车可退出监控"
echo ""
# 在后台启动tail命令
tail -f "$TODAY_DIR/${selected_route}_error.log" &
TAIL_PID=$!
# 等待用户输入x退出
while true; do
read -p "" input
if [ "$input" = "x" ] || [ "$input" = "X" ]; then
kill $TAIL_PID 2>/dev/null
echo ""
echo "✅ 已停止实时监控"
break
fi
done
else
echo "❌ 无效的路由选择"
fi
;;
3)
read -p "请输入搜索关键词: " keyword
if [ -z "$keyword" ]; then
echo "❌ 搜索关键词不能为空"
continue
fi
echo "🔍 搜索结果 (关键词: $keyword)"
echo "==================================="
found_any=false
# 使用临时文件来避免子shell问题
temp_result="/tmp/search_result_$$"
> "$temp_result"
# 搜索包含关键词的文件
find "$TODAY_DIR" -name "*.log" -exec grep -l "$keyword" {} \; 2>/dev/null | while read file; do
echo "📄 $file:" >> "$temp_result"
grep "$keyword" "$file" | tail -5 >> "$temp_result"
echo "" >> "$temp_result"
done
# 检查是否有结果
if [ -s "$temp_result" ]; then
cat "$temp_result"
else
echo "❌ 未找到匹配的日志"
fi
# 清理临时文件
rm -f "$temp_result"
;;
4)
echo "📅 选择特定日期:"
echo "=================="
echo "可用的日期:"
ls -1 "$LOGS_DIR" | grep -E "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" | sort -r | nl
echo ""
read -p "请输入日期编号 (或按Enter返回): " date_num
if [ -z "$date_num" ]; then
continue
fi
selected_date=$(ls -1 "$LOGS_DIR" | grep -E "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" | sort -r | sed -n "${date_num}p")
if [ -n "$selected_date" ] && [ -d "$LOGS_DIR/$selected_date" ]; then
echo ""
echo "📄 $selected_date 的日志文件:"
echo "========================"
ls -la "$LOGS_DIR/$selected_date/" | grep -E "\.log$"
# 询问是否查看该日期的某个文件
echo ""
read -p "是否查看该日期的某个日志文件? (y/N): " view_file
if [[ $view_file == [yY] ]]; then
echo "该日期的日志文件列表:"
ls -1 "$LOGS_DIR/$selected_date/" | grep -E "\.log$" | nl
read -p "请输入文件编号: " file_num
selected_file=$(ls -1 "$LOGS_DIR/$selected_date/" | grep -E "\.log$" | sed -n "${file_num}p")
if [ -f "$LOGS_DIR/$selected_date/$selected_file" ]; then
echo ""
echo "📄 查看文件: $LOGS_DIR/$selected_date/$selected_file"
echo "==========================================="
tail -20 "$LOGS_DIR/$selected_date/$selected_file"
else
echo "❌ 无效的文件选择"
fi
fi
else
echo "❌ 无效的日期选择"
fi
;;
5)
echo "📊 日志统计:"
echo "============"
if [ -d "$TODAY_DIR" ]; then
success_count=$(find "$TODAY_DIR" -name "*_success.log" -exec wc -l {} + 2>/dev/null | tail -1 | awk '{print $1}' | grep . || echo "0")
error_count=$(find "$TODAY_DIR" -name "*_error.log" -exec wc -l {} + 2>/dev/null | tail -1 | awk '{print $1}' | grep . || echo "0")
echo "今天的日志统计:"
echo " 成功请求: $success_count"
echo " 失败请求: $error_count"
if [ $((success_count + error_count)) -gt 0 ]; then
success_rate=$(echo "scale=2; $success_count * 100 / ($success_count + $error_count)" | bc 2>/dev/null || echo "0")
echo " 成功率: ${success_rate}%"
fi
echo ""
echo "路由统计 (按请求条数):"
find "$TODAY_DIR" -name "*.log" -exec basename {} .log \; | sort | uniq -c | sort -nr
echo ""
echo "文件大小统计:"
find "$TODAY_DIR" -name "*.log" -exec ls -lh {} \; | awk '{print " " $9 " (" $5 ")"}'
else
echo "❌ 今天没有日志文件"
fi
;;
6)
read -p "确认清理7天前的日志? (y/N): " confirm
if [[ $confirm == [yY] ]]; then
echo "🧹 正在清理7天前的日志..."
deleted_dirs=$(find "$LOGS_DIR" -mindepth 1 -maxdepth 1 -type d -name "????-??-??" -mtime +7 -print -exec rm -rf {} \; 2>/dev/null | wc -l)
echo "✅ 旧日志清理完成,删除了 $deleted_dirs 个目录"
else
echo "❌ 操作已取消"
fi
;;
0)
echo "👋 退出日志查看工具"
exit 0
;;
*)
echo "❌ 无效选项请输入0-6之间的数字"
;;
esac
# 如果不是退出选项,等待用户按回车继续
if [ "$choice" != "0" ]; then
echo ""
read -p "按 Enter 键继续..." dummy
fi
done
}
# 启动主循环
show_menu_and_process
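
view_logs.sh 假定的目录结构是 logs/<YYYY-MM-DD>/<路由>_success.log 与 <路由>_error.log,菜单选项 5 按成功/失败日志行数计算成功率。下面用 Python 给出同一口径的统计示意(假设性的参考实现,函数名与参数均为示例):

# 最小示意:按 logs/<日期>/ 下的 *_success.log / *_error.log 统计当日成功率(假设性实现)
from datetime import date
from pathlib import Path


def daily_stats(logs_dir: str = "logs") -> dict:
    today_dir = Path(logs_dir) / date.today().isoformat()

    def count_lines(pattern: str) -> int:
        # 累加匹配文件的行数;目录不存在时按 0 处理
        if not today_dir.is_dir():
            return 0
        return sum(
            sum(1 for _ in p.open(encoding="utf-8", errors="ignore"))
            for p in today_dir.glob(pattern)
        )

    success = count_lines("*_success.log")
    error = count_lines("*_error.log")
    total = success + error
    rate = round(success * 100 / total, 2) if total else 0.0
    return {"success": success, "error": error, "success_rate": rate}


if __name__ == "__main__":
    print(daily_stats())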