feat: add security module + curl in Dockerfile for health checks

Main updates:
- New security/ module (risk scoring, threat detection, honeypot, etc.)
- Add curl to the Dockerfile to support the Docker health check
- Frontend updates (admin console and user-facing pages)
- Database migrations and schema updates
- New kdocs upload service
- Security-related test cases

Co-Authored-By: Claude <noreply@anthropic.com>
services/kdocs_uploader.py | 1494 lines (new file)
File diff suppressed because it is too large
@@ -4,6 +4,7 @@ from __future__ import annotations

import threading
import time
from datetime import datetime

from app_config import get_config
from app_logger import get_logger

@@ -26,6 +27,9 @@ USER_ACCOUNTS_EXPIRE_SECONDS = int(getattr(config, "USER_ACCOUNTS_EXPIRE_SECONDS
BATCH_TASK_EXPIRE_SECONDS = int(getattr(config, "BATCH_TASK_EXPIRE_SECONDS", 21600))
PENDING_RANDOM_EXPIRE_SECONDS = int(getattr(config, "PENDING_RANDOM_EXPIRE_SECONDS", 7200))

# 金山文档离线通知状态:每次掉线只通知一次,恢复在线后重置
_kdocs_offline_notified: bool = False


def cleanup_expired_data() -> None:
    """定期清理过期数据,防止内存泄漏(逻辑保持不变)。"""
@@ -91,6 +95,87 @@ def cleanup_expired_data() -> None:
    logger.debug(f"已清理 {deleted_random} 个过期随机延迟任务")


def check_kdocs_online_status() -> None:
    """检测金山文档登录状态,如果离线则发送邮件通知管理员(每次掉线只通知一次)"""
    global _kdocs_offline_notified

    try:
        import database
        from services.kdocs_uploader import get_kdocs_uploader

        # 获取系统配置
        cfg = database.get_system_config()
        if not cfg:
            return

        # 检查是否启用了金山文档功能
        kdocs_enabled = int(cfg.get("kdocs_enabled") or 0)
        if not kdocs_enabled:
            return

        # 检查是否启用了管理员通知
        admin_notify_enabled = int(cfg.get("kdocs_admin_notify_enabled") or 0)
        admin_notify_email = (cfg.get("kdocs_admin_notify_email") or "").strip()
        if not admin_notify_enabled or not admin_notify_email:
            return

        # 获取金山文档状态
        kdocs = get_kdocs_uploader()
        status = kdocs.get_status()
        login_required = status.get("login_required", False)
        last_login_ok = status.get("last_login_ok")

        # 如果需要登录或最后登录状态不是成功
        is_offline = login_required or (last_login_ok is False)

        if is_offline:
            # 已经通知过了,不再重复通知
            if _kdocs_offline_notified:
                logger.debug("[KDocs监控] 金山文档离线,已通知过,跳过重复通知")
                return

            # 发送邮件通知
            try:
                import email_service

                now_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                subject = "【金山文档离线告警】需要重新登录"
                body = f"""
您好,

系统检测到金山文档上传功能已离线,需要重新扫码登录。

检测时间:{now_str}
状态详情:
- 需要登录:{login_required}
- 上次登录状态:{last_login_ok}

请尽快登录后台,在"系统配置"→"金山文档上传"中点击"获取登录二维码"重新登录。

---
此邮件由系统自动发送,请勿直接回复。
"""
                email_service.send_email_async(
                    to_email=admin_notify_email,
                    subject=subject,
                    body=body,
                    email_type="kdocs_offline_alert",
                )
                _kdocs_offline_notified = True # 标记为已通知
                logger.warning(f"[KDocs监控] 金山文档离线,已发送通知邮件到 {admin_notify_email}")
            except Exception as e:
                logger.error(f"[KDocs监控] 发送离线通知邮件失败: {e}")
        else:
            # 恢复在线,重置通知状态
            if _kdocs_offline_notified:
                logger.info("[KDocs监控] 金山文档已恢复在线,重置通知状态")
                _kdocs_offline_notified = False
            logger.debug("[KDocs监控] 金山文档状态正常")

    except Exception as e:
        logger.error(f"[KDocs监控] 检测失败: {e}")


def start_cleanup_scheduler() -> None:
    """启动定期清理调度器"""
@@ -106,3 +191,22 @@ def start_cleanup_scheduler() -> None:
    cleanup_thread.start()
    logger.info("内存清理调度器已启动")


def start_kdocs_monitor() -> None:
    """启动金山文档状态监控"""

    def monitor_loop():
        # 启动后等待 60 秒再开始检测(给系统初始化的时间)
        time.sleep(60)
        while True:
            try:
                check_kdocs_online_status()
                time.sleep(300) # 每5分钟检测一次
            except Exception as e:
                logger.error(f"[KDocs监控] 监控任务执行失败: {e}")
                time.sleep(60)

    monitor_thread = threading.Thread(target=monitor_loop, daemon=True, name="kdocs-monitor")
    monitor_thread.start()
    logger.info("[KDocs监控] 金山文档状态监控已启动(每5分钟检测一次)")
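For context, a minimal sketch of how these two entry points would typically be wired at application startup; the module path used in the import is an assumption, since the diff does not name the modified file.

# Hypothetical startup wiring (module path assumed; the diff does not show the file name).
# Both helpers spawn daemon threads, so they exit together with the main process.
from services.scheduler import start_cleanup_scheduler, start_kdocs_monitor


def init_background_jobs() -> None:
    start_cleanup_scheduler()  # periodic in-memory cleanup
    start_kdocs_monitor()      # KDocs login-status check every 5 minutes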
@@ -87,19 +87,32 @@ def run_scheduled_task(skip_weekday_check: bool = False) -> None:
    cfg = database.get_system_config()
    enable_screenshot_scheduled = cfg.get("enable_screenshot", 0) == 1

    user_accounts = {}
    account_ids = []
    for user in approved_users:
        user_id = user["id"]
        accounts = safe_get_user_accounts_snapshot(user_id)
        if not accounts:
            load_user_accounts(user_id)
            accounts = safe_get_user_accounts_snapshot(user_id)
        if accounts:
            user_accounts[user_id] = accounts
            account_ids.extend(list(accounts.keys()))

    account_statuses = database.get_account_status_batch(account_ids)

    for user in approved_users:
        user_id = user["id"]
        accounts = user_accounts.get(user_id, {})
        if not accounts:
            continue
        for account_id, account in accounts.items():
            total_accounts += 1

            if account.is_running:
                continue

            account_status_info = database.get_account_status(account_id)
            account_status_info = account_statuses.get(str(account_id))
            if account_status_info:
                status = account_status_info["status"] if "status" in account_status_info.keys() else "active"
                if status == "suspended":
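The second loop above switches from one database.get_account_status(account_id) call per account to a single batched lookup keyed by str(account_id). The batch helper itself is not part of this diff; a hypothetical sketch of its shape, using one SQL IN query, might look like the following (table and column names are assumptions).

# Hypothetical shape of database.get_account_status_batch (not shown in this diff):
# one IN query instead of one round trip per account.
import sqlite3


def get_account_status_batch(conn: sqlite3.Connection, account_ids: list[int]) -> dict[str, dict]:
    if not account_ids:
        return {}
    conn.row_factory = sqlite3.Row
    placeholders = ",".join("?" for _ in account_ids)
    rows = conn.execute(
        f"SELECT account_id, status FROM account_status WHERE account_id IN ({placeholders})",
        account_ids,
    ).fetchall()
    # Keys are stringified to match the account_statuses.get(str(account_id)) lookup above.
    return {str(row["account_id"]): dict(row) for row in rows}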
@@ -150,6 +163,16 @@ def scheduled_task_worker() -> None:
    """定时任务工作线程"""
    import schedule

    def decay_risk_scores():
        """风险分衰减:每天定时执行一次"""
        try:
            from security.risk_scorer import RiskScorer

            RiskScorer().decay_scores()
            logger.info("[定时任务] 风险分衰减已执行")
        except Exception as e:
            logger.exception(f"[定时任务] 风险分衰减执行失败: {e}")

    def cleanup_expired_captcha():
        try:
            deleted_count = safe_cleanup_expired_captcha()

@@ -362,7 +385,12 @@ def scheduled_task_worker() -> None:
    if schedule_time_cst != str(schedule_time_raw or "").strip():
        logger.warning(f"[定时任务] 系统定时时间格式无效,已回退到 {schedule_time_cst} (原值: {schedule_time_raw!r})")

    signature = (schedule_enabled, schedule_time_cst)
    risk_decay_time_raw = os.environ.get("RISK_SCORE_DECAY_TIME_CST", "04:00")
    risk_decay_time_cst = _normalize_hhmm(risk_decay_time_raw, default="04:00")
    if risk_decay_time_cst != str(risk_decay_time_raw or "").strip():
        logger.warning(f"[定时任务] 风险分衰减时间格式无效,已回退到 {risk_decay_time_cst} (原值: {risk_decay_time_raw!r})")

    signature = (schedule_enabled, schedule_time_cst, risk_decay_time_cst)
    config_changed = schedule_state.get("signature") != signature
    is_first_run = schedule_state.get("signature") is None
    if (not force) and (not config_changed):

@@ -374,6 +402,8 @@ def scheduled_task_worker() -> None:
    cleanup_time_cst = "03:00"
    schedule.every().day.at(cleanup_time_cst).do(cleanup_old_data)

    schedule.every().day.at(risk_decay_time_cst).do(decay_risk_scores)

    schedule.every().hour.do(cleanup_expired_captcha)

    quota_reset_time_cst = "00:00"

@@ -381,6 +411,7 @@ def scheduled_task_worker() -> None:

    if is_first_run:
        logger.info(f"[定时任务] 已设置数据清理任务: 每天 CST {cleanup_time_cst}")
        logger.info(f"[定时任务] 已设置风险分衰减: 每天 CST {risk_decay_time_cst}")
        logger.info(f"[定时任务] 已设置验证码清理任务: 每小时执行一次")
        logger.info(f"[定时任务] 已设置SMTP配额重置: 每天 CST {quota_reset_time_cst}")
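The jobs registered above use the schedule library; for reference, a minimal sketch of how such registrations are normally driven (the actual worker loop is outside the shown hunks, and the job body here is a stand-in):

# Minimal schedule-driven loop for illustration only.
import time

import schedule


def decay_job() -> None:
    print("risk score decay")  # placeholder for RiskScorer().decay_scores()


schedule.every().day.at("04:00").do(decay_job)

while True:
    schedule.run_pending()  # runs any job whose scheduled time has passed
    time.sleep(1)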
@@ -3,15 +3,16 @@
from __future__ import annotations

import os
import shutil
import subprocess
import time

import database
import email_service
from api_browser import APIBrowser, get_cookie_jar_path, is_cookie_jar_fresh
from app_config import get_config
from app_logger import get_logger
from browser_pool_worker import get_browser_worker_pool
from playwright_automation import PlaywrightAutomation
from services.browser_manager import get_browser_manager
from services.client_log import log_to_client
from services.runtime import get_socketio
from services.state import safe_get_account, safe_remove_task_status, safe_update_task_status
@@ -24,6 +25,165 @@ config = get_config()
SCREENSHOTS_DIR = config.SCREENSHOTS_DIR
os.makedirs(SCREENSHOTS_DIR, exist_ok=True)

_WKHTMLTOIMAGE_TIMEOUT_SECONDS = int(os.environ.get("WKHTMLTOIMAGE_TIMEOUT_SECONDS", "60"))
_WKHTMLTOIMAGE_JS_DELAY_MS = int(os.environ.get("WKHTMLTOIMAGE_JS_DELAY_MS", "3000"))
_WKHTMLTOIMAGE_WIDTH = int(os.environ.get("WKHTMLTOIMAGE_WIDTH", "1920"))
_WKHTMLTOIMAGE_HEIGHT = int(os.environ.get("WKHTMLTOIMAGE_HEIGHT", "1080"))
_WKHTMLTOIMAGE_QUALITY = int(os.environ.get("WKHTMLTOIMAGE_QUALITY", "95"))
_WKHTMLTOIMAGE_ZOOM = float(os.environ.get("WKHTMLTOIMAGE_ZOOM", "1.0"))
_WKHTMLTOIMAGE_FULL_PAGE = str(os.environ.get("WKHTMLTOIMAGE_FULL_PAGE", "")).strip().lower() in (
    "1",
    "true",
    "yes",
    "on",
)
_env_crop_w = os.environ.get("WKHTMLTOIMAGE_CROP_WIDTH")
_env_crop_h = os.environ.get("WKHTMLTOIMAGE_CROP_HEIGHT")
_WKHTMLTOIMAGE_CROP_WIDTH = int(_env_crop_w) if _env_crop_w is not None else _WKHTMLTOIMAGE_WIDTH
_WKHTMLTOIMAGE_CROP_HEIGHT = (
    int(_env_crop_h) if _env_crop_h is not None else (_WKHTMLTOIMAGE_HEIGHT if _WKHTMLTOIMAGE_HEIGHT > 0 else 0)
)
_WKHTMLTOIMAGE_CROP_X = int(os.environ.get("WKHTMLTOIMAGE_CROP_X", "0"))
_WKHTMLTOIMAGE_CROP_Y = int(os.environ.get("WKHTMLTOIMAGE_CROP_Y", "0"))
_WKHTMLTOIMAGE_UA = os.environ.get(
    "WKHTMLTOIMAGE_USER_AGENT",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
)


def _resolve_wkhtmltoimage_path() -> str | None:
    return os.environ.get("WKHTMLTOIMAGE_PATH") or shutil.which("wkhtmltoimage")


def _read_cookie_pairs(cookies_path: str) -> list[tuple[str, str]]:
    if not cookies_path or not os.path.exists(cookies_path):
        return []
    pairs = []
    try:
        with open(cookies_path, "r", encoding="utf-8", errors="ignore") as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith("#"):
                    continue
                parts = line.split("\t")
                if len(parts) < 7:
                    continue
                name = parts[5].strip()
                value = parts[6].strip()
                if name:
                    pairs.append((name, value))
    except Exception:
        return []
    return pairs


def _select_cookie_pairs(pairs: list[tuple[str, str]]) -> list[tuple[str, str]]:
    preferred_names = {"ASP.NET_SessionId", ".ASPXAUTH"}
    preferred = [(name, value) for name, value in pairs if name in preferred_names and value]
    if preferred:
        return preferred
    return [(name, value) for name, value in pairs if name and value and name.isascii() and value.isascii()]


def _ensure_login_cookies(account, proxy_config, log_callback) -> bool:
    """确保有可用的登录 cookies(通过 API 登录刷新)"""
    try:
        with APIBrowser(log_callback=log_callback, proxy_config=proxy_config) as api_browser:
            if not api_browser.login(account.username, account.password):
                return False
            return api_browser.save_cookies_for_screenshot(account.username)
    except Exception:
        return False


def take_screenshot_wkhtmltoimage(
    url: str,
    output_path: str,
    cookies_path: str | None = None,
    proxy_server: str | None = None,
    run_script: str | None = None,
    window_status: str | None = None,
    log_callback=None,
) -> bool:
    wkhtmltoimage_path = _resolve_wkhtmltoimage_path()
    if not wkhtmltoimage_path:
        if log_callback:
            log_callback("wkhtmltoimage 未安装或不在 PATH 中")
        return False

    ext = os.path.splitext(output_path)[1].lower()
    image_format = "jpg" if ext in (".jpg", ".jpeg") else "png"

    cmd = [
        wkhtmltoimage_path,
        "--format",
        image_format,
        "--width",
        str(_WKHTMLTOIMAGE_WIDTH),
        "--disable-smart-width",
        "--javascript-delay",
        str(_WKHTMLTOIMAGE_JS_DELAY_MS),
        "--load-error-handling",
        "ignore",
        "--enable-local-file-access",
        "--encoding",
        "utf-8",
    ]

    if _WKHTMLTOIMAGE_UA:
        cmd.extend(["--custom-header", "User-Agent", _WKHTMLTOIMAGE_UA, "--custom-header-propagation"])

    if image_format in ("jpg", "jpeg"):
        cmd.extend(["--quality", str(_WKHTMLTOIMAGE_QUALITY)])

    if _WKHTMLTOIMAGE_HEIGHT > 0 and not _WKHTMLTOIMAGE_FULL_PAGE:
        cmd.extend(["--height", str(_WKHTMLTOIMAGE_HEIGHT)])

    if abs(_WKHTMLTOIMAGE_ZOOM - 1.0) > 1e-6:
        cmd.extend(["--zoom", str(_WKHTMLTOIMAGE_ZOOM)])

    if not _WKHTMLTOIMAGE_FULL_PAGE and (_WKHTMLTOIMAGE_CROP_WIDTH > 0 or _WKHTMLTOIMAGE_CROP_HEIGHT > 0):
        cmd.extend(["--crop-x", str(_WKHTMLTOIMAGE_CROP_X), "--crop-y", str(_WKHTMLTOIMAGE_CROP_Y)])
        if _WKHTMLTOIMAGE_CROP_WIDTH > 0:
            cmd.extend(["--crop-w", str(_WKHTMLTOIMAGE_CROP_WIDTH)])
        if _WKHTMLTOIMAGE_CROP_HEIGHT > 0:
            cmd.extend(["--crop-h", str(_WKHTMLTOIMAGE_CROP_HEIGHT)])

    if run_script:
        cmd.extend(["--run-script", run_script])
    if window_status:
        cmd.extend(["--window-status", window_status])

    if cookies_path:
        cookie_pairs = _select_cookie_pairs(_read_cookie_pairs(cookies_path))
        if cookie_pairs:
            for name, value in cookie_pairs:
                cmd.extend(["--cookie", name, value])
        else:
            cmd.extend(["--cookie-jar", cookies_path])

    if proxy_server:
        cmd.extend(["--proxy", proxy_server])

    cmd.extend([url, output_path])

    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=_WKHTMLTOIMAGE_TIMEOUT_SECONDS)
        if result.returncode != 0:
            if log_callback:
                err_msg = (result.stderr or result.stdout or "").strip()
                log_callback(f"wkhtmltoimage 截图失败: {err_msg[:200]}")
            return False
        return True
    except subprocess.TimeoutExpired:
        if log_callback:
            log_callback("wkhtmltoimage 截图超时")
        return False
    except Exception as e:
        if log_callback:
            log_callback(f"wkhtmltoimage 截图异常: {e}")
        return False


def _emit(event: str, data: object, *, room: str | None = None) -> None:
    try:
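_read_cookie_pairs above expects a Netscape-format cookie jar: tab-separated lines with seven fields (domain, include-subdomains flag, path, secure flag, expiry, name, value), which is why it reads indexes 5 and 6. A small self-contained illustration with made-up values:

# Illustrative only; the domain and cookie value below are invented.
sample_line = "example.com\tFALSE\t/\tFALSE\t0\tASP.NET_SessionId\tabc123"
parts = sample_line.split("\t")
assert len(parts) == 7
assert (parts[5], parts[6]) == ("ASP.NET_SessionId", "abc123")  # name, value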
@@ -42,7 +202,7 @@ def take_screenshot_for_account(
    task_start_time=None,
    browse_result=None,
):
    """为账号任务完成后截图(使用工作线程池,真正的浏览器复用)"""
    """为账号任务完成后截图(使用截图线程池并发执行)"""
    account = safe_get_account(user_id, account_id)
    if not account:
        return

@@ -63,9 +223,11 @@ def take_screenshot_for_account(
        _emit("account_update", acc.to_dict(), room=f"user_{user_id}")

    max_retries = 3
    proxy_config = account.proxy_config if hasattr(account, "proxy_config") else None
    proxy_server = proxy_config.get("server") if proxy_config else None
    cookie_path = get_cookie_jar_path(account.username)

    for attempt in range(1, max_retries + 1):
        automation = None
        try:
            safe_update_task_status(
                account_id,
@@ -75,100 +237,70 @@ def take_screenshot_for_account(
            if attempt > 1:
                log_to_client(f"🔄 第 {attempt} 次截图尝试...", user_id, account_id)

            worker_id = browser_instance.get("worker_id", "?") if isinstance(browser_instance, dict) else "?"
            use_count = browser_instance.get("use_count", 0) if isinstance(browser_instance, dict) else 0
            log_to_client(
                f"使用Worker-{browser_instance['worker_id']}的浏览器(已使用{browser_instance['use_count']}次)",
                f"使用Worker-{worker_id}执行截图(已执行{use_count}次)",
                user_id,
                account_id,
            )

            proxy_config = account.proxy_config if hasattr(account, "proxy_config") else None
            automation = PlaywrightAutomation(get_browser_manager(), account_id, proxy_config=proxy_config)
            automation.playwright = browser_instance["playwright"]
            automation.browser = browser_instance["browser"]

            def custom_log(message: str):
                log_to_client(message, user_id, account_id)

            automation.log = custom_log

            log_to_client("登录中...", user_id, account_id)
            login_result = automation.quick_login(account.username, account.password, account.remember)
            if not login_result["success"]:
                error_message = login_result.get("message", "截图登录失败")
                log_to_client(f"截图登录失败: {error_message}", user_id, account_id)
                if attempt < max_retries:
                    log_to_client("将重试...", user_id, account_id)
                    time.sleep(2)
                    continue
                log_to_client("❌ 截图失败: 登录失败", user_id, account_id)
                return {"success": False, "error": "登录失败"}
            if not is_cookie_jar_fresh(cookie_path) or attempt > 1:
                log_to_client("正在刷新登录态...", user_id, account_id)
                if not _ensure_login_cookies(account, proxy_config, custom_log):
                    log_to_client("截图登录失败", user_id, account_id)
                    if attempt < max_retries:
                        log_to_client("将重试...", user_id, account_id)
                        time.sleep(2)
                        continue
                    log_to_client("❌ 截图失败: 登录失败", user_id, account_id)
                    return {"success": False, "error": "登录失败"}

            log_to_client(f"导航到 '{browse_type}' 页面...", user_id, account_id)

            # 截图场景:优先用 bz 参数直达页面(更稳定,避免页面按钮点击失败导致截图跑偏)
            navigated = False
            try:
                from urllib.parse import urlsplit
                from urllib.parse import urlsplit

                parsed = urlsplit(config.ZSGL_LOGIN_URL)
                base = f"{parsed.scheme}://{parsed.netloc}"
                if "注册前" in str(browse_type):
                    bz = 0
                else:
                    bz = 2 # 应读
                target_url = f"{base}/admin/center.aspx?bz={bz}"
                # 目标:保留外层框架(左侧菜单/顶部栏),仅在 mainframe 内部导航到目标内容页
                iframe = None
                try:
                    iframe = automation.get_iframe_safe(retry=True, max_retries=5)
                except Exception:
                    iframe = None

                if iframe:
                    iframe.goto(target_url, timeout=60000)
                    current_url = getattr(iframe, "url", "") or ""
                    if "center.aspx" not in current_url:
                        raise RuntimeError(f"unexpected_iframe_url:{current_url}")
                    try:
                        iframe.wait_for_load_state("networkidle", timeout=10000)
                    except Exception:
                        pass
                    try:
                        iframe.wait_for_selector("table.ltable", timeout=5000)
                    except Exception:
                        pass
                else:
                    # 兜底:若获取不到 iframe,则退回到主页面直达
                    automation.main_page.goto(target_url, timeout=60000)
                    current_url = getattr(automation.main_page, "url", "") or ""
                    if "center.aspx" not in current_url:
                        raise RuntimeError(f"unexpected_url:{current_url}")
                    try:
                        automation.main_page.wait_for_load_state("networkidle", timeout=10000)
                    except Exception:
                        pass
                    try:
                        automation.main_page.wait_for_selector("table.ltable", timeout=5000)
                    except Exception:
                        pass
                navigated = True
            except Exception as nav_error:
                log_to_client(f"直达页面失败,将尝试按钮切换: {str(nav_error)[:120]}", user_id, account_id)

            # 兼容兜底:若直达失败,则回退到原有按钮切换方式
            if not navigated:
                result = automation.browse_content(
                    navigate_only=True,
                    browse_type=browse_type,
                    auto_next_page=False,
                    auto_view_attachments=False,
                    interval=0,
                    should_stop_callback=None,
                )
                if not result.success and result.error_message:
                    log_to_client(f"导航警告: {result.error_message}", user_id, account_id)

            time.sleep(2)
            parsed = urlsplit(config.ZSGL_LOGIN_URL)
            base = f"{parsed.scheme}://{parsed.netloc}"
            if "注册前" in str(browse_type):
                bz = 0
            else:
                bz = 2 # 应读
            target_url = f"{base}/admin/center.aspx?bz={bz}"
            index_url = config.ZSGL_INDEX_URL or f"{base}/admin/index.aspx"
            run_script = (
                "(function(){"
                "function done(){window.status='ready';}"
                "function ensureNav(){try{if(typeof loadMenuTree==='function'){loadMenuTree(true);}}catch(e){}}"
                "function expandMenu(){"
                "try{var body=document.body;if(body&&body.classList.contains('lay-mini')){body.classList.remove('lay-mini');}}catch(e){}"
                "try{if(typeof mainPageResize==='function'){mainPageResize();}}catch(e){}"
                "try{if(typeof toggleMainMenu==='function' && document.body && document.body.classList.contains('lay-mini')){toggleMainMenu();}}catch(e){}"
                "try{var navRight=document.querySelector('.nav-right');if(navRight){navRight.style.display='block';}}catch(e){}"
                "try{var mainNav=document.getElementById('main-nav');if(mainNav){mainNav.style.display='block';}}catch(e){}"
                "}"
                "function navReady(){"
                "try{var nav=document.getElementById('sidebar-nav');return nav && nav.querySelectorAll('a').length>0;}catch(e){return false;}"
                "}"
                "function frameReady(){"
                "try{var f=document.getElementById('mainframe');return f && f.contentDocument && f.contentDocument.readyState==='complete';}catch(e){return false;}"
                "}"
                "function check(){"
                "if(navReady() && frameReady()){done();return;}"
                "setTimeout(check,300);"
                "}"
                "var f=document.getElementById('mainframe');"
                "ensureNav();"
                "expandMenu();"
                "if(!f){done();return;}"
                f"f.src='{target_url}';"
                "f.onload=function(){ensureNav();expandMenu();setTimeout(check,300);};"
                "setTimeout(check,5000);"
                "})();"
            )

            timestamp = get_beijing_now().strftime("%Y%m%d_%H%M%S")
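The run_script built above and the window_status="ready" argument used in the next hunk work as a handshake: the injected JS sets window.status to "ready" once the sidebar and mainframe have finished loading, and wkhtmltoimage's --window-status flag makes it wait for that value before rendering. A stripped-down sketch of the same idea (not the project's actual script):

# Minimal handshake sketch: set window.status once the document is ready, and let
# wkhtmltoimage wait for it via --window-status (passed as window_status below).
handshake_js = (
    "(function(){"
    "function check(){"
    "if(document.readyState==='complete'){window.status='ready';return;}"
    "setTimeout(check,300);"
    "}"
    "check();"
    "})();"
)
# take_screenshot_wkhtmltoimage(url, output_path, run_script=handshake_js, window_status="ready")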
@@ -178,7 +310,22 @@ def take_screenshot_for_account(
            screenshot_filename = f"{username_prefix}_{login_account}_{browse_type}_{timestamp}.jpg"
            screenshot_path = os.path.join(SCREENSHOTS_DIR, screenshot_filename)

            if automation.take_screenshot(screenshot_path):
            cookies_for_shot = cookie_path if is_cookie_jar_fresh(cookie_path) else None
            if take_screenshot_wkhtmltoimage(
                index_url,
                screenshot_path,
                cookies_path=cookies_for_shot,
                proxy_server=proxy_server,
                run_script=run_script,
                window_status="ready",
                log_callback=custom_log,
            ) or take_screenshot_wkhtmltoimage(
                target_url,
                screenshot_path,
                cookies_path=cookies_for_shot,
                proxy_server=proxy_server,
                log_callback=custom_log,
            ):
                if os.path.exists(screenshot_path) and os.path.getsize(screenshot_path) > 1000:
                    log_to_client(f"✓ 截图成功: {screenshot_filename}", user_id, account_id)
                    return {"success": True, "filename": screenshot_filename}

@@ -197,15 +344,6 @@ def take_screenshot_for_account(
            if attempt < max_retries:
                log_to_client("将重试...", user_id, account_id)
                time.sleep(2)
        finally:
            if automation:
                try:
                    if automation.context:
                        automation.context.close()
                    automation.context = None
                    automation.page = None
                except Exception as e:
                    logger.debug(f"关闭context时出错: {e}")

    return {"success": False, "error": "截图失败,已重试3次"}
@@ -250,6 +388,35 @@ def take_screenshot_for_account(

    account_name = account.remark if account.remark else account.username

    try:
        if screenshot_path and result and result.get("success"):
            cfg = database.get_system_config() or {}
            if int(cfg.get("kdocs_enabled", 0) or 0) == 1:
                doc_url = (cfg.get("kdocs_doc_url") or "").strip()
                if doc_url:
                    user_cfg = database.get_user_kdocs_settings(user_id) or {}
                    if int(user_cfg.get("kdocs_auto_upload", 0) or 0) == 1:
                        unit = (user_cfg.get("kdocs_unit") or cfg.get("kdocs_default_unit") or "").strip()
                        name = (account.remark or "").strip()
                        if unit and name:
                            from services.kdocs_uploader import get_kdocs_uploader
                            ok = get_kdocs_uploader().enqueue_upload(
                                user_id=user_id,
                                account_id=account_id,
                                unit=unit,
                                name=name,
                                image_path=screenshot_path,
                            )
                            if not ok:
                                log_to_client("表格上传排队失败: 队列已满", user_id, account_id)
                        else:
                            if not unit:
                                log_to_client("表格上传跳过: 未配置县区", user_id, account_id)
                            if not name:
                                log_to_client("表格上传跳过: 账号备注为空", user_id, account_id)
    except Exception as kdocs_error:
        logger.warning(f"表格上传任务提交失败: {kdocs_error}")

    if batch_id:
        _batch_task_record_result(
            batch_id=batch_id,
@@ -573,8 +573,16 @@ def run_task(user_id, account_id, browse_type, enable_screenshot=True, source="m

    with APIBrowser(log_callback=custom_log, proxy_config=proxy_config) as api_browser:
        if api_browser.login(account.username, account.password):
            log_to_client("✓ 登录成功!", user_id, account_id)
            api_browser.save_cookies_for_playwright(account.username)
            log_to_client("✓ 首次登录成功,刷新登录时间...", user_id, account_id)

            # 二次登录:让"上次登录时间"变成刚才首次登录的时间
            # 这样截图时显示的"上次登录时间"就是几秒前而不是昨天
            if api_browser.login(account.username, account.password):
                log_to_client("✓ 二次登录成功!", user_id, account_id)
            else:
                log_to_client("⚠ 二次登录失败,继续使用首次登录状态", user_id, account_id)

            api_browser.save_cookies_for_screenshot(account.username)
            database.reset_account_login_status(account_id)

            if not account.remark: