perf: optimize polling, stats cache, and frontend chunk splitting

This commit is contained in:
2026-02-07 11:41:49 +08:00
parent 21c537da10
commit 04b94d7fb2
73 changed files with 516 additions and 203 deletions

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
import os
import socket
import threading
import time
from datetime import datetime
@@ -16,12 +17,37 @@ from services.time_utils import BEIJING_TZ, get_beijing_now
# Module-level logger for this admin blueprint.
logger = get_logger("app")
# TTL (seconds) for the cached /stats payload; env-overridable, floored at 1s.
_ADMIN_STATS_CACHE_TTL = max(1.0, float(os.environ.get("ADMIN_STATS_CACHE_TTL_SECONDS", "5")))
# Cache slot: monotonic expiry timestamp + last stats dict (None until first fill).
_admin_stats_cache: dict[str, object] = {"expires_at_monotonic": 0.0, "data": None}
# Guards all reads/writes of _admin_stats_cache.
_admin_stats_cache_lock = threading.Lock()
# TTL (seconds) for the cached container/service status; env-overridable, floored at 2s.
_DOCKER_STATS_CACHE_TTL = max(2.0, float(os.environ.get("ADMIN_DOCKER_STATS_CACHE_TTL_SECONDS", "5")))
# Same shape as _admin_stats_cache, but for the docker-status endpoint.
_docker_stats_cache: dict[str, object] = {"expires_at_monotonic": 0.0, "data": None}
# Guards all reads/writes of _docker_stats_cache.
_docker_stats_cache_lock = threading.Lock()
def _get_system_stats_cached() -> dict:
    """Return system stats, cached for ``_ADMIN_STATS_CACHE_TTL`` seconds.

    The lock is held across the database refresh so that concurrent callers
    hitting an expired cache wait for a single ``database.get_system_stats()``
    query instead of each issuing their own (the original released the lock
    between the freshness check and the store, allowing a cache stampede).
    Callers would have blocked on the same lock to read the cache anyway, so
    this serialization adds no new contention.

    Returns:
        A shallow copy of the stats dict, so callers cannot mutate the cache.
    """
    now = time.monotonic()
    with _admin_stats_cache_lock:
        expires_at = float(_admin_stats_cache.get("expires_at_monotonic") or 0.0)
        cached = _admin_stats_cache.get("data")
        if isinstance(cached, dict) and now < expires_at:
            return dict(cached)
        # Cache miss or expired: refresh while still holding the lock so only
        # one thread queries the database per TTL window.
        fresh = database.get_system_stats() or {}
        _admin_stats_cache["data"] = dict(fresh)
        _admin_stats_cache["expires_at_monotonic"] = now + _ADMIN_STATS_CACHE_TTL
        return dict(fresh)
@admin_api_bp.route("/stats", methods=["GET"])
@admin_required
def get_system_stats():
    """Return system statistics (TTL-cached) plus the current admin username.

    Uses ``_get_system_stats_cached()`` exclusively; the direct
    ``database.get_system_stats()`` call that preceded it was redundant — its
    result was immediately overwritten, costing an extra DB query per request.
    """
    stats = _get_system_stats_cached()
    stats["admin_username"] = session.get("admin_username", "admin")
    return jsonify(stats)
@@ -132,6 +158,13 @@ def _fill_host_service_stats(docker_status: dict) -> None:
@admin_required
def get_docker_stats():
"""获取容器运行状态(非容器部署时返回当前服务进程状态)"""
now = time.monotonic()
with _docker_stats_cache_lock:
expires_at = float(_docker_stats_cache.get("expires_at_monotonic") or 0.0)
cached_data = _docker_stats_cache.get("data")
if isinstance(cached_data, dict) and now < expires_at:
return jsonify(dict(cached_data))
docker_status = {
"running": False,
"container_name": "N/A",
@@ -245,4 +278,8 @@ def get_docker_stats():
logger.exception(f"获取容器/服务状态失败: {e}")
docker_status["status"] = f"Error: {e}"
with _docker_stats_cache_lock:
_docker_stats_cache["data"] = dict(docker_status)
_docker_stats_cache["expires_at_monotonic"] = now + _DOCKER_STATS_CACHE_TTL
return jsonify(docker_status)