From 1323acaa848d1eaafbe5535434bc8912bbcfa158 Mon Sep 17 00:00:00 2001
From: vaclavec <82129251+vaclavec@users.noreply.github.com>
Date: Tue, 25 Nov 2025 15:39:12 +0100
Subject: [PATCH 1/6] Add statistics, download history, game metadata, API
 monitoring, bandwidth limiting, and activity tracking

---
 backend/activity_tracker.py    | 183 +++++++++
 backend/api_monitor.py         | 206 ++++++++++
 backend/bandwidth_limiter.py   | 123 ++++++
 backend/download_history.py    | 176 +++++++++
 backend/downloads.py           |   9 +
 backend/fix_conflicts.py       | 252 ++++++++++++
 backend/game_metadata.json     |   5 +
 backend/game_metadata.py       | 270 +++++++++++++
 backend/main.py                | 263 ++++++++++++-
 backend/script_dependencies.py | 284 ++++++++++++++
 backend/statistics.py          | 200 ++++++++++
 public/luatools.js             | 686 ++++++++++++++++++++++++++++++++-
 12 files changed, 2654 insertions(+), 3 deletions(-)
 create mode 100644 backend/activity_tracker.py
 create mode 100644 backend/api_monitor.py
 create mode 100644 backend/bandwidth_limiter.py
 create mode 100644 backend/download_history.py
 create mode 100644 backend/fix_conflicts.py
 create mode 100644 backend/game_metadata.json
 create mode 100644 backend/game_metadata.py
 create mode 100644 backend/script_dependencies.py
 create mode 100644 backend/statistics.py

diff --git a/backend/activity_tracker.py b/backend/activity_tracker.py
new file mode 100644
index 0000000..eda13e1
--- /dev/null
+++ b/backend/activity_tracker.py
@@ -0,0 +1,183 @@
+"""Real-time activity tracking for live dashboard display."""
+
+from __future__ import annotations
+
+import json
+import threading
+import time
+from typing import Any, Dict, List
+
+from logger import logger
+
+# Activity tracking
+_ACTIVITY_STATE = {
+    "lock": threading.Lock(),
+    "current_operations": {},  # op_id: {type, status, progress, started_at}
+    "operation_history": [],  # List of completed operations
+    "max_history": 100,
+}
+
+
+def start_operation(operation_id: str, op_type: str, description: str = "") -> None:
+    """Mark the start of an operation."""
+    with _ACTIVITY_STATE["lock"]:
+        _ACTIVITY_STATE["current_operations"][operation_id] = {
+            "id": operation_id,
+            "type": op_type,  # "download", "install", "fix_apply", etc.
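+            # Lifecycle, for illustration (the operation id is caller-chosen):
+            #   start_operation("dl-570", "download", "Downloading manifest")
+            #   update_operation("dl-570", status="downloading", progress=50.0)
+            #   complete_operation("dl-570", success=True)  # or cancel_operation("dl-570")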
+ "description": description, + "status": "starting", + "progress": 0.0, + "started_at": time.time(), + "bytes_total": 0, + "bytes_current": 0, + "speed": 0.0, + } + + +def update_operation(operation_id: str, status: str = "", progress: float = 0.0, + bytes_total: int = 0, bytes_current: int = 0, speed: float = 0.0) -> None: + """Update operation progress.""" + with _ACTIVITY_STATE["lock"]: + if operation_id in _ACTIVITY_STATE["current_operations"]: + op = _ACTIVITY_STATE["current_operations"][operation_id] + if status: + op["status"] = status + if progress >= 0: + op["progress"] = min(100.0, progress) + if bytes_total > 0: + op["bytes_total"] = bytes_total + if bytes_current >= 0: + op["bytes_current"] = bytes_current + if speed >= 0: + op["speed"] = speed + + +def complete_operation(operation_id: str, success: bool = True, error: str = "") -> None: + """Mark operation as complete.""" + with _ACTIVITY_STATE["lock"]: + if operation_id in _ACTIVITY_STATE["current_operations"]: + op = _ACTIVITY_STATE["current_operations"].pop(operation_id) + op["status"] = "success" if success else "failed" + op["completed_at"] = time.time() + if error: + op["error"] = error + op["progress"] = 100.0 + + _ACTIVITY_STATE["operation_history"].append(op) + + # Trim history + if len(_ACTIVITY_STATE["operation_history"]) > _ACTIVITY_STATE["max_history"]: + _ACTIVITY_STATE["operation_history"] = _ACTIVITY_STATE["operation_history"][-_ACTIVITY_STATE["max_history"]:] + + +def cancel_operation(operation_id: str) -> None: + """Mark operation as cancelled.""" + with _ACTIVITY_STATE["lock"]: + if operation_id in _ACTIVITY_STATE["current_operations"]: + op = _ACTIVITY_STATE["current_operations"].pop(operation_id) + op["status"] = "cancelled" + op["completed_at"] = time.time() + _ACTIVITY_STATE["operation_history"].append(op) + + +def get_current_operations() -> List[Dict[str, Any]]: + """Get list of currently active operations.""" + with _ACTIVITY_STATE["lock"]: + return list(_ACTIVITY_STATE["current_operations"].values()) + + +def get_operation_history(limit: int = 50) -> List[Dict[str, Any]]: + """Get operation history.""" + with _ACTIVITY_STATE["lock"]: + return _ACTIVITY_STATE["operation_history"][-limit:][::-1] + + +def get_dashboard_data() -> Dict[str, Any]: + """Get comprehensive dashboard data for real-time display.""" + with _ACTIVITY_STATE["lock"]: + current_ops = list(_ACTIVITY_STATE["current_operations"].values()) + + # Calculate aggregate statistics + total_ops = len(current_ops) + downloading = sum(1 for op in current_ops if op.get("type") == "download") + installing = sum(1 for op in current_ops if op.get("type") == "install") + applying_fixes = sum(1 for op in current_ops if op.get("type") == "fix_apply") + + # Calculate total speed (sum of all active downloads) + total_speed = sum(op.get("speed", 0.0) for op in current_ops if op.get("type") == "download") + + # Calculate total data transferred + total_bytes = sum(op.get("bytes_current", 0) for op in current_ops) + + # Calculate average progress + avg_progress = 0.0 + if current_ops: + avg_progress = sum(op.get("progress", 0.0) for op in current_ops) / len(current_ops) + + return { + "timestamp": time.time(), + "current_operations": current_ops, + "operation_counts": { + "total": total_ops, + "downloading": downloading, + "installing": installing, + "applying_fixes": applying_fixes, + }, + "aggregates": { + "total_speed_bytes_per_sec": total_speed, + "total_bytes_transferred": total_bytes, + "average_progress_percent": round(avg_progress, 1), + }, + 
"recent_history": _ACTIVITY_STATE["operation_history"][-10:], + } + + +def get_dashboard_json() -> str: + """Get dashboard data as JSON.""" + data = get_dashboard_data() + return json.dumps({ + "success": True, + "dashboard": data, + }) + + +def clear_history() -> None: + """Clear operation history.""" + with _ACTIVITY_STATE["lock"]: + _ACTIVITY_STATE["operation_history"] = [] + + +def get_operation_statistics() -> Dict[str, Any]: + """Get statistics from operation history.""" + with _ACTIVITY_STATE["lock"]: + history = _ACTIVITY_STATE["operation_history"] + + if not history: + return { + "total_operations": 0, + "successful": 0, + "failed": 0, + "cancelled": 0, + "success_rate": 0.0, + "average_duration_seconds": 0.0, + } + + successful = sum(1 for op in history if op.get("status") == "success") + failed = sum(1 for op in history if op.get("status") == "failed") + cancelled = sum(1 for op in history if op.get("status") == "cancelled") + + durations = [ + op.get("completed_at", op.get("started_at", 0)) - op.get("started_at", 0) + for op in history + if op.get("completed_at") + ] + avg_duration = sum(durations) / len(durations) if durations else 0 + + return { + "total_operations": len(history), + "successful": successful, + "failed": failed, + "cancelled": cancelled, + "success_rate": (successful / len(history) * 100) if history else 0, + "average_duration_seconds": round(avg_duration, 2), + } diff --git a/backend/api_monitor.py b/backend/api_monitor.py new file mode 100644 index 0000000..4a3dfb9 --- /dev/null +++ b/backend/api_monitor.py @@ -0,0 +1,206 @@ +"""API monitoring and analytics for LuaTools.""" + +from __future__ import annotations + +import json +import os +import threading +import time +from typing import Any, Dict, List, Optional + +from logger import logger +from paths import backend_path +from utils import read_json, write_json + +API_MONITOR_FILE = "api_monitor.json" +MONITOR_LOCK = threading.Lock() + +# In-memory cache +_MONITOR_CACHE: Dict[str, Any] = {} +_CACHE_INITIALIZED = False +_MAX_HISTORY_PER_API = 1000 + + +def _get_monitor_path() -> str: + return backend_path(API_MONITOR_FILE) + + +def _ensure_monitor_initialized() -> None: + """Initialize API monitoring file if not exists.""" + global _MONITOR_CACHE, _CACHE_INITIALIZED + + if _CACHE_INITIALIZED and _MONITOR_CACHE: + return + + path = _get_monitor_path() + if os.path.exists(path): + try: + _MONITOR_CACHE = read_json(path) + _CACHE_INITIALIZED = True + return + except Exception as exc: + logger.warn(f"LuaTools: Failed to load API monitor: {exc}") + + # Create default structure + _MONITOR_CACHE = { + "version": 1, + "created_at": time.time(), + "apis": {}, # url: {requests: [], last_status: 200, up_count: 0, down_count: 0} + } + _persist_monitor() + _CACHE_INITIALIZED = True + + +def _persist_monitor() -> None: + """Write monitor data to disk.""" + try: + path = _get_monitor_path() + write_json(path, _MONITOR_CACHE) + except Exception as exc: + logger.warn(f"LuaTools: Failed to persist API monitor: {exc}") + + +def record_api_request(api_url: str, status_code: int = 200, response_time_ms: float = 0.0, success: bool = True) -> None: + """Record an API request.""" + with MONITOR_LOCK: + _ensure_monitor_initialized() + + api_url_str = str(api_url).strip() + if api_url_str not in _MONITOR_CACHE["apis"]: + _MONITOR_CACHE["apis"][api_url_str] = { + "requests": [], + "last_status": 0, + "last_checked": 0, + "success_count": 0, + "failure_count": 0, + "total_response_time": 0.0, + } + + api_entry = 
_MONITOR_CACHE["apis"][api_url_str] + request_entry = { + "timestamp": time.time(), + "status_code": status_code, + "response_time_ms": response_time_ms, + "success": success, + } + + api_entry["requests"].append(request_entry) + api_entry["last_status"] = status_code + api_entry["last_checked"] = time.time() + api_entry["total_response_time"] = api_entry.get("total_response_time", 0.0) + response_time_ms + + if success: + api_entry["success_count"] = api_entry.get("success_count", 0) + 1 + else: + api_entry["failure_count"] = api_entry.get("failure_count", 0) + 1 + + # Keep history manageable + if len(api_entry["requests"]) > _MAX_HISTORY_PER_API: + api_entry["requests"] = api_entry["requests"][-_MAX_HISTORY_PER_API :] + + _persist_monitor() + + +def get_api_status(api_url: str) -> Dict[str, Any]: + """Get current status of an API.""" + with MONITOR_LOCK: + _ensure_monitor_initialized() + + api_url_str = str(api_url).strip() + if api_url_str not in _MONITOR_CACHE["apis"]: + return { + "url": api_url_str, + "status": "unknown", + "last_checked": 0, + "uptime_percentage": 0, + "average_response_time_ms": 0, + } + + api_entry = _MONITOR_CACHE["apis"][api_url_str] + requests = api_entry.get("requests", []) + total = len(requests) + + uptime = 0 + avg_response_time = 0 + if total > 0: + success = api_entry.get("success_count", 0) + uptime = (success / total * 100) if total > 0 else 0 + total_time = api_entry.get("total_response_time", 0.0) + avg_response_time = total_time / total if total > 0 else 0 + + is_up = api_entry.get("last_status", 0) == 200 + return { + "url": api_url_str, + "status": "up" if is_up else "down", + "last_checked": api_entry.get("last_checked", 0), + "last_status_code": api_entry.get("last_status", 0), + "uptime_percentage": round(uptime, 2), + "average_response_time_ms": round(avg_response_time, 2), + "total_requests": total, + "success_count": api_entry.get("success_count", 0), + "failure_count": api_entry.get("failure_count", 0), + } + + +def get_all_api_statuses() -> List[Dict[str, Any]]: + """Get status of all monitored APIs.""" + with MONITOR_LOCK: + _ensure_monitor_initialized() + + statuses = [] + for api_url in _MONITOR_CACHE["apis"]: + status = get_api_status(api_url) + statuses.append(status) + + return sorted(statuses, key=lambda x: x.get("last_checked", 0), reverse=True) + + +def get_api_performance_metrics(api_url: str, limit: int = 100) -> Dict[str, Any]: + """Get detailed performance metrics for an API.""" + with MONITOR_LOCK: + _ensure_monitor_initialized() + + api_url_str = str(api_url).strip() + if api_url_str not in _MONITOR_CACHE["apis"]: + return {"success": False, "error": "API not found"} + + api_entry = _MONITOR_CACHE["apis"][api_url_str] + requests = api_entry.get("requests", [])[-limit :] + + response_times = [r.get("response_time_ms", 0) for r in requests] + status_codes = [r.get("status_code", 0) for r in requests] + + return { + "success": True, + "url": api_url_str, + "request_count": len(requests), + "latest_requests": requests, + "min_response_time_ms": min(response_times) if response_times else 0, + "max_response_time_ms": max(response_times) if response_times else 0, + "avg_response_time_ms": sum(response_times) / len(response_times) if response_times else 0, + "status_code_distribution": _count_status_codes(status_codes), + } + + +def _count_status_codes(codes: List[int]) -> Dict[int, int]: + """Count occurrences of each status code.""" + counts: Dict[int, int] = {} + for code in codes: + counts[code] = counts.get(code, 0) + 1 + return 
counts + + +def get_monitor_json() -> str: + """Get all monitoring data as JSON.""" + statuses = get_all_api_statuses() + return json.dumps({ + "success": True, + "apis": statuses, + "timestamp": time.time(), + }) + + +def is_api_available(api_url: str, required_uptime_percentage: float = 80.0) -> bool: + """Check if an API is available based on uptime threshold.""" + status = get_api_status(api_url) + return status.get("uptime_percentage", 0) >= required_uptime_percentage diff --git a/backend/bandwidth_limiter.py b/backend/bandwidth_limiter.py new file mode 100644 index 0000000..94ab574 --- /dev/null +++ b/backend/bandwidth_limiter.py @@ -0,0 +1,123 @@ +"""Bandwidth throttling and rate limiting for LuaTools downloads.""" + +from __future__ import annotations + +import threading +import time +from typing import Optional + +from logger import logger + +# Global throttle state +_THROTTLE_STATE = { + "enabled": False, + "max_bytes_per_second": 0, # 0 = unlimited + "current_speed": 0.0, + "lock": threading.Lock(), +} + + +def enable_throttling(max_bytes_per_second: int = 1024 * 1024) -> None: + """Enable bandwidth throttling.""" + with _THROTTLE_STATE["lock"]: + _THROTTLE_STATE["enabled"] = True + _THROTTLE_STATE["max_bytes_per_second"] = max(max_bytes_per_second, 1024) # Minimum 1 KB/s + logger.log(f"LuaTools: Bandwidth throttling enabled at {max_bytes_per_second} bytes/sec") + + +def disable_throttling() -> None: + """Disable bandwidth throttling.""" + with _THROTTLE_STATE["lock"]: + _THROTTLE_STATE["enabled"] = False + logger.log("LuaTools: Bandwidth throttling disabled") + + +def set_bandwidth_limit(max_bytes_per_second: int) -> None: + """Set bandwidth limit.""" + with _THROTTLE_STATE["lock"]: + _THROTTLE_STATE["max_bytes_per_second"] = max(max_bytes_per_second, 1024) + if _THROTTLE_STATE["enabled"]: + logger.log(f"LuaTools: Bandwidth limit set to {max_bytes_per_second} bytes/sec") + + +def get_bandwidth_settings() -> dict: + """Get current bandwidth settings.""" + with _THROTTLE_STATE["lock"]: + return { + "enabled": _THROTTLE_STATE["enabled"], + "max_bytes_per_second": _THROTTLE_STATE["max_bytes_per_second"], + "current_speed": _THROTTLE_STATE["current_speed"], + } + + +class BandwidthLimiter: + """Context manager for bandwidth-limited downloads.""" + + def __init__(self): + self.start_time: Optional[float] = None + self.bytes_downloaded = 0 + + def throttle_if_needed(self, bytes_chunk: int) -> None: + """Throttle download if bandwidth limit is set.""" + with _THROTTLE_STATE["lock"]: + if not _THROTTLE_STATE["enabled"]: + return + + max_bytes_per_sec = _THROTTLE_STATE["max_bytes_per_second"] + if max_bytes_per_sec <= 0: + return + + if self.start_time is None: + self.start_time = time.time() + + self.bytes_downloaded += bytes_chunk + elapsed = time.time() - self.start_time + + # Calculate expected time for downloaded bytes + expected_time = self.bytes_downloaded / max_bytes_per_sec + + if expected_time > elapsed: + # Sleep to maintain the rate limit + sleep_time = expected_time - elapsed + time.sleep(sleep_time) + + # Update current speed + if elapsed > 0: + with _THROTTLE_STATE["lock"]: + _THROTTLE_STATE["current_speed"] = self.bytes_downloaded / elapsed + + def reset(self) -> None: + """Reset the limiter.""" + self.start_time = None + self.bytes_downloaded = 0 + + +def format_bandwidth(bytes_per_second: float) -> str: + """Format bandwidth as human-readable string.""" + if bytes_per_second < 1024: + return f"{bytes_per_second:.0f} B/s" + elif bytes_per_second < 1024 * 1024: + return 
f"{bytes_per_second / 1024:.1f} KB/s" + elif bytes_per_second < 1024 * 1024 * 1024: + return f"{bytes_per_second / (1024 * 1024):.1f} MB/s" + else: + return f"{bytes_per_second / (1024 * 1024 * 1024):.1f} GB/s" + + +def format_time_remaining(bytes_remaining: int, current_speed: float) -> str: + """Format estimated time remaining.""" + if current_speed <= 0: + return "Unknown" + + seconds_remaining = bytes_remaining / current_speed + + if seconds_remaining < 60: + return f"{int(seconds_remaining)}s" + elif seconds_remaining < 3600: + minutes = int(seconds_remaining / 60) + seconds = int(seconds_remaining % 60) + return f"{minutes}m {seconds}s" + else: + hours = int(seconds_remaining / 3600) + minutes = int((seconds_remaining % 3600) / 60) + return f"{hours}h {minutes}m" diff --git a/backend/download_history.py b/backend/download_history.py new file mode 100644 index 0000000..b27d717 --- /dev/null +++ b/backend/download_history.py @@ -0,0 +1,176 @@ +"""Download history tracking for LuaTools.""" + +from __future__ import annotations + +import json +import os +import threading +import time +from typing import Any, Dict, List, Optional + +from logger import logger +from paths import backend_path +from utils import read_json, write_json + +DOWNLOAD_HISTORY_FILE = "download_history.json" +HISTORY_LOCK = threading.Lock() + +# In-memory cache +_HISTORY_CACHE: Dict[str, Any] = {} +_CACHE_INITIALIZED = False +_MAX_HISTORY_ENTRIES = 1000 # Keep last 1000 downloads + + +def _get_history_path() -> str: + return backend_path(DOWNLOAD_HISTORY_FILE) + + +def _ensure_history_initialized() -> None: + """Initialize history file if not exists.""" + global _HISTORY_CACHE, _CACHE_INITIALIZED + + if _CACHE_INITIALIZED and _HISTORY_CACHE: + return + + path = _get_history_path() + if os.path.exists(path): + try: + _HISTORY_CACHE = read_json(path) + _CACHE_INITIALIZED = True + return + except Exception as exc: + logger.warn(f"LuaTools: Failed to load download history: {exc}") + + # Create default history structure + _HISTORY_CACHE = { + "version": 1, + "created_at": time.time(), + "downloads": [], # List of download entries + "total_downloaded_bytes": 0, + } + _persist_history() + _CACHE_INITIALIZED = True + + +def _persist_history() -> None: + """Write history to disk.""" + try: + path = _get_history_path() + write_json(path, _HISTORY_CACHE) + except Exception as exc: + logger.warn(f"LuaTools: Failed to persist download history: {exc}") + + +def record_download_start(download_id: str, appid: int, app_name: str, file_url: str, file_size: int = 0) -> None: + """Record the start of a download.""" + with HISTORY_LOCK: + _ensure_history_initialized() + + entry = { + "id": download_id, + "appid": appid, + "app_name": app_name, + "file_url": file_url, + "file_size": file_size, + "started_at": time.time(), + "status": "downloading", + } + + _HISTORY_CACHE["downloads"].append(entry) + + # Keep history size manageable + if len(_HISTORY_CACHE["downloads"]) > _MAX_HISTORY_ENTRIES: + _HISTORY_CACHE["downloads"] = _HISTORY_CACHE["downloads"][-_MAX_HISTORY_ENTRIES :] + + _persist_history() + logger.log(f"LuaTools: Started tracking download {download_id} for appid {appid}") + + +def record_download_complete(download_id: str, success: bool = True, bytes_downloaded: int = 0, error: str = "") -> None: + """Record the completion of a download.""" + with HISTORY_LOCK: + _ensure_history_initialized() + + # Find and update entry + for entry in _HISTORY_CACHE["downloads"]: + if entry.get("id") == download_id: + entry["completed_at"] = 
time.time()
+                entry["status"] = "success" if success else "failed"
+                entry["bytes_downloaded"] = bytes_downloaded
+                if error:
+                    entry["error"] = error
+
+                if success:
+                    _HISTORY_CACHE["total_downloaded_bytes"] = _HISTORY_CACHE.get("total_downloaded_bytes", 0) + bytes_downloaded
+
+                _persist_history()
+                logger.log(f"LuaTools: Download {download_id} completed with status {'success' if success else 'failed'}")
+                return
+
+        logger.warn(f"LuaTools: Download entry {download_id} not found for completion record")
+
+
+def record_download_cancelled(download_id: str) -> None:
+    """Record that a download was cancelled."""
+    with HISTORY_LOCK:
+        _ensure_history_initialized()
+
+        for entry in _HISTORY_CACHE["downloads"]:
+            if entry.get("id") == download_id:
+                entry["completed_at"] = time.time()
+                entry["status"] = "cancelled"
+                _persist_history()
+                return
+
+
+def get_download_history(limit: int = 50) -> List[Dict[str, Any]]:
+    """Get recent download history."""
+    with HISTORY_LOCK:
+        _ensure_history_initialized()
+        # Return most recent downloads first
+        return _HISTORY_CACHE["downloads"][-limit:][::-1]
+
+
+def get_download_history_json(limit: int = 50) -> str:
+    """Get download history as JSON."""
+    history = get_download_history(limit)
+    # Read the running total under the lock as well, for consistency
+    with HISTORY_LOCK:
+        total_bytes = _HISTORY_CACHE.get("total_downloaded_bytes", 0)
+    return json.dumps({
+        "success": True,
+        "downloads": history,
+        "total_downloaded_bytes": total_bytes,
+    })
+
+
+def get_download_statistics() -> Dict[str, Any]:
+    """Get aggregate download statistics."""
+    with HISTORY_LOCK:
+        _ensure_history_initialized()
+
+        downloads = _HISTORY_CACHE.get("downloads", [])
+        successful = [d for d in downloads if d.get("status") == "success"]
+        failed = [d for d in downloads if d.get("status") == "failed"]
+        cancelled = [d for d in downloads if d.get("status") == "cancelled"]
+
+        total_size = sum(d.get("bytes_downloaded", 0) for d in successful)
+        avg_download_time = 0.0
+        if successful:
+            times = [d.get("completed_at", 0) - d.get("started_at", 0) for d in successful]
+            avg_download_time = sum(times) / len(times) if times else 0
+
+        return {
+            "total_downloads": len(downloads),
+            "successful_downloads": len(successful),
+            "failed_downloads": len(failed),
+            "cancelled_downloads": len(cancelled),
+            "success_rate": len(successful) / len(downloads) * 100 if downloads else 0,
+            "total_bytes_downloaded": total_size,
+            "average_download_time_seconds": avg_download_time,
+        }
+
+
+def clear_download_history() -> None:
+    """Clear all download history."""
+    with HISTORY_LOCK:
+        _HISTORY_CACHE["downloads"] = []
+        _persist_history()
+    logger.log("LuaTools: Download history cleared")
diff --git a/backend/downloads.py b/backend/downloads.py
index 979987d..491e292 100644
--- a/backend/downloads.py
+++ b/backend/downloads.py
@@ -24,6 +24,7 @@
 from http_client import ensure_http_client
 from logger import logger
 from paths import backend_path, public_path
+from statistics import record_download as stats_record_download
 from steam_utils import detect_steam_install_path, has_lua_for_app
 from utils import count_apis, ensure_temp_download_dir, normalize_manifest_text, read_text, write_text
 
@@ -327,6 +328,14 @@ def _download_zip_for_app(appid: int):
                 _log_appid_event(f"ADDED - {name}", appid, fetched_name)
             except Exception:
                 pass
+
+            # Track download statistics (record_download expects the file size)
+            try:
+                file_size = os.path.getsize(dest_path) if os.path.exists(dest_path) else 0
+                stats_record_download(file_size)
+            except Exception as stats_err:
+                logger.warn(f"LuaTools: Failed to record download stats: {stats_err}")
+
             _set_download_state(appid, 
{"status": "done", "success": True, "api": name}) return except Exception as install_exc: diff --git a/backend/fix_conflicts.py b/backend/fix_conflicts.py new file mode 100644 index 0000000..c2a317d --- /dev/null +++ b/backend/fix_conflicts.py @@ -0,0 +1,252 @@ +"""Fix conflict detection system for LuaTools.""" + +from __future__ import annotations + +import json +import os +import threading +import time +from typing import Any, Dict, List, Optional, Set, Tuple + +from logger import logger +from paths import backend_path +from utils import read_json, write_json + +CONFLICT_MATRIX_FILE = "fix_conflicts.json" +CONFLICT_LOCK = threading.Lock() + +# In-memory cache +_CONFLICT_CACHE: Dict[str, Any] = {} +_CACHE_INITIALIZED = False + + +def _get_conflict_path() -> str: + return backend_path(CONFLICT_MATRIX_FILE) + + +def _ensure_conflicts_initialized() -> None: + """Initialize conflict matrix file if not exists.""" + global _CONFLICT_CACHE, _CACHE_INITIALIZED + + if _CACHE_INITIALIZED and _CONFLICT_CACHE: + return + + path = _get_conflict_path() + if os.path.exists(path): + try: + _CONFLICT_CACHE = read_json(path) + _CACHE_INITIALIZED = True + return + except Exception as exc: + logger.warn(f"LuaTools: Failed to load conflict matrix: {exc}") + + # Create default structure + _CONFLICT_CACHE = { + "version": 1, + "created_at": time.time(), + "game_fixes": {}, # appid: {generic: {}, online: {}, last_applied: time} + "known_conflicts": [], # List of known conflict pairs + } + _persist_conflicts() + _CACHE_INITIALIZED = True + + +def _persist_conflicts() -> None: + """Write conflict data to disk.""" + try: + path = _get_conflict_path() + write_json(path, _CONFLICT_CACHE) + except Exception as exc: + logger.warn(f"LuaTools: Failed to persist conflict matrix: {exc}") + + +def record_fix_applied(appid: int, fix_type: str, fix_version: str = "", fix_url: str = "") -> None: + """Record that a fix was applied to a game.""" + with CONFLICT_LOCK: + _ensure_conflicts_initialized() + + appid_str = str(appid) + if appid_str not in _CONFLICT_CACHE["game_fixes"]: + _CONFLICT_CACHE["game_fixes"][appid_str] = { + "generic": {}, + "online": {}, + "last_applied": 0, + } + + game_entry = _CONFLICT_CACHE["game_fixes"][appid_str] + fix_data = { + "version": fix_version, + "url": fix_url, + "applied_at": time.time(), + } + + if fix_type == "generic": + game_entry["generic"] = fix_data + elif fix_type == "online": + game_entry["online"] = fix_data + + game_entry["last_applied"] = time.time() + _persist_conflicts() + logger.log(f"LuaTools: Recorded {fix_type} fix for appid {appid}") + + +def record_fix_removed(appid: int, fix_type: str) -> None: + """Record that a fix was removed from a game.""" + with CONFLICT_LOCK: + _ensure_conflicts_initialized() + + appid_str = str(appid) + if appid_str in _CONFLICT_CACHE["game_fixes"]: + game_entry = _CONFLICT_CACHE["game_fixes"][appid_str] + if fix_type == "generic": + game_entry["generic"] = {} + elif fix_type == "online": + game_entry["online"] = {} + _persist_conflicts() + + +def check_for_conflicts(appid: int, proposed_fix_type: str) -> Dict[str, Any]: + """Check if applying a fix would cause conflicts.""" + with CONFLICT_LOCK: + _ensure_conflicts_initialized() + + appid_str = str(appid) + if appid_str not in _CONFLICT_CACHE["game_fixes"]: + return { + "appid": appid, + "has_conflicts": False, + "conflicts": [], + "warnings": [], + } + + game_entry = _CONFLICT_CACHE["game_fixes"][appid_str] + conflicts = [] + warnings = [] + + # Check primary conflicts + if proposed_fix_type == 
"online" and game_entry.get("generic"): + # Applying online fix when generic exists + conflicts.append({ + "type": "GENERIC_ONLINE_CONFLICT", + "description": "Generic and Online fixes may conflict. Generic fix will be replaced.", + "severity": "warning", + "conflicting_fix": "generic", + }) + warnings.append("Online fix is recommended for multiplayer. Generic fix will be removed.") + + elif proposed_fix_type == "generic" and game_entry.get("online"): + # Applying generic fix when online exists + conflicts.append({ + "type": "ONLINE_GENERIC_CONFLICT", + "description": "Online and Generic fixes may conflict. Online fix will be replaced.", + "severity": "warning", + "conflicting_fix": "online", + }) + warnings.append("You have an Online fix installed. Applying Generic fix will remove it.") + + # Check for known problematic combinations + for conflict_pair in _CONFLICT_CACHE.get("known_conflicts", []): + if (appid in conflict_pair.get("appids", []) and + proposed_fix_type in conflict_pair.get("fix_types", [])): + conflicts.append({ + "type": conflict_pair.get("type", "KNOWN_CONFLICT"), + "description": conflict_pair.get("description", "Known conflict detected"), + "severity": conflict_pair.get("severity", "warning"), + }) + + return { + "appid": appid, + "has_conflicts": len(conflicts) > 0, + "conflicts": conflicts, + "warnings": warnings, + } + + +def register_known_conflict(appids: List[int], fix_types: List[str], description: str = "", severity: str = "warning") -> None: + """Register a known conflict between fixes.""" + with CONFLICT_LOCK: + _ensure_conflicts_initialized() + + conflict_entry = { + "appids": appids, + "fix_types": fix_types, + "description": description, + "severity": severity, + "registered_at": time.time(), + } + + _CONFLICT_CACHE["known_conflicts"].append(conflict_entry) + _persist_conflicts() + + +def get_applied_fixes(appid: int) -> Dict[str, Any]: + """Get all currently applied fixes for a game.""" + with CONFLICT_LOCK: + _ensure_conflicts_initialized() + + appid_str = str(appid) + if appid_str not in _CONFLICT_CACHE["game_fixes"]: + return { + "appid": appid, + "generic": None, + "online": None, + "total_fixes": 0, + } + + game_entry = _CONFLICT_CACHE["game_fixes"][appid_str] + generic_fix = game_entry.get("generic") if game_entry.get("generic") else None + online_fix = game_entry.get("online") if game_entry.get("online") else None + + return { + "appid": appid, + "generic": generic_fix, + "online": online_fix, + "total_fixes": (1 if generic_fix else 0) + (1 if online_fix else 0), + "last_applied": game_entry.get("last_applied", 0), + } + + +def get_conflict_report(appid: int) -> Dict[str, Any]: + """Get a comprehensive conflict report for a game.""" + applied = get_applied_fixes(appid) + generic_type = "generic" if applied.get("generic") else None + online_type = "online" if applied.get("online") else None + + # Determine what the user wants to do and check conflicts + conflicts_if_add_generic = check_for_conflicts(appid, "generic") if not generic_type else None + conflicts_if_add_online = check_for_conflicts(appid, "online") if not online_type else None + + return { + "appid": appid, + "applied_fixes": applied, + "potential_conflicts_generic": conflicts_if_add_generic, + "potential_conflicts_online": conflicts_if_add_online, + "recommendations": _generate_recommendations(applied), + } + + +def _generate_recommendations(applied_fixes: Dict[str, Any]) -> List[str]: + """Generate recommendations based on applied fixes.""" + recommendations = [] + + total = 
applied_fixes.get("total_fixes", 0) + if total == 0: + recommendations.append("No fixes applied. Consider checking if this game needs fixes.") + elif total == 2: + recommendations.append("Both generic and online fixes are applied. This is unusual. Consider removing generic fix if online works.") + + generic = applied_fixes.get("generic") + online = applied_fixes.get("online") + + if generic and not online: + recommendations.append("Only generic fix applied. For multiplayer games, consider Online fix.") + elif online and not generic: + recommendations.append("Online fix applied. Good for multiplayer compatibility.") + + return recommendations + + +def get_conflict_json(appid: int) -> str: + """Get conflict report as JSON.""" + report = get_conflict_report(appid) + return json.dumps({"success": True, "report": report}) diff --git a/backend/game_metadata.json b/backend/game_metadata.json new file mode 100644 index 0000000..6774e62 --- /dev/null +++ b/backend/game_metadata.json @@ -0,0 +1,5 @@ +{ + "version": 1, + "created_at": 1764078384.9861271, + "games": {} +} \ No newline at end of file diff --git a/backend/game_metadata.py b/backend/game_metadata.py new file mode 100644 index 0000000..ce34952 --- /dev/null +++ b/backend/game_metadata.py @@ -0,0 +1,270 @@ +"""Game metadata storage for enhanced game information.""" + +from __future__ import annotations + +import json +import os +import threading +import time +from typing import Any, Dict, List, Optional + +from logger import logger +from paths import backend_path +from utils import read_json, write_json + +GAME_METADATA_FILE = "game_metadata.json" +METADATA_LOCK = threading.Lock() + +# In-memory cache +_METADATA_CACHE: Dict[str, Any] = {} +_CACHE_INITIALIZED = False + + +def _get_metadata_path() -> str: + return backend_path(GAME_METADATA_FILE) + + +def _ensure_metadata_initialized() -> None: + """Initialize metadata file if not exists.""" + global _METADATA_CACHE, _CACHE_INITIALIZED + + if _CACHE_INITIALIZED and _METADATA_CACHE: + return + + path = _get_metadata_path() + if os.path.exists(path): + try: + _METADATA_CACHE = read_json(path) + _CACHE_INITIALIZED = True + return + except Exception as exc: + logger.warn(f"LuaTools: Failed to load game metadata: {exc}") + + # Create default metadata structure + _METADATA_CACHE = { + "version": 1, + "created_at": time.time(), + "games": {}, # appid: {name, tags, notes, rating, favorite, custom_data} + } + _persist_metadata() + _CACHE_INITIALIZED = True + + +def _persist_metadata() -> None: + """Write metadata to disk.""" + try: + path = _get_metadata_path() + write_json(path, _METADATA_CACHE) + except Exception as exc: + logger.warn(f"LuaTools: Failed to persist game metadata: {exc}") + + +def add_or_update_game(appid: int, app_name: str) -> None: + """Add or update a game in metadata.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + appid_str = str(appid) + if appid_str not in _METADATA_CACHE["games"]: + _METADATA_CACHE["games"][appid_str] = { + "name": app_name, + "tags": [], + "notes": "", + "rating": 0, + "favorite": False, + "added_at": time.time(), + "last_modified": time.time(), + } + else: + _METADATA_CACHE["games"][appid_str]["last_modified"] = time.time() + + _persist_metadata() + + +def set_game_tags(appid: int, tags: List[str]) -> None: + """Set tags for a game.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + appid_str = str(appid) + if appid_str not in _METADATA_CACHE["games"]: + _METADATA_CACHE["games"][appid_str] = {"tags": []} + + 
_METADATA_CACHE["games"][appid_str]["tags"] = list(set(tags)) # Remove duplicates + _METADATA_CACHE["games"][appid_str]["last_modified"] = time.time() + _persist_metadata() + + +def add_game_tag(appid: int, tag: str) -> None: + """Add a tag to a game.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + appid_str = str(appid) + if appid_str not in _METADATA_CACHE["games"]: + _METADATA_CACHE["games"][appid_str] = {"tags": []} + + tags = _METADATA_CACHE["games"][appid_str].get("tags", []) + if tag not in tags: + tags.append(tag) + _METADATA_CACHE["games"][appid_str]["tags"] = tags + _METADATA_CACHE["games"][appid_str]["last_modified"] = time.time() + _persist_metadata() + + +def remove_game_tag(appid: int, tag: str) -> None: + """Remove a tag from a game.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + appid_str = str(appid) + if appid_str in _METADATA_CACHE["games"]: + tags = _METADATA_CACHE["games"][appid_str].get("tags", []) + if tag in tags: + tags.remove(tag) + _METADATA_CACHE["games"][appid_str]["tags"] = tags + _METADATA_CACHE["games"][appid_str]["last_modified"] = time.time() + _persist_metadata() + + +def set_game_notes(appid: int, notes: str) -> None: + """Set notes for a game.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + appid_str = str(appid) + if appid_str not in _METADATA_CACHE["games"]: + _METADATA_CACHE["games"][appid_str] = {} + + _METADATA_CACHE["games"][appid_str]["notes"] = str(notes)[:1000] # Max 1000 chars + _METADATA_CACHE["games"][appid_str]["last_modified"] = time.time() + _persist_metadata() + + +def set_game_rating(appid: int, rating: int) -> None: + """Set personal rating for a game (0-5).""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + appid_str = str(appid) + if appid_str not in _METADATA_CACHE["games"]: + _METADATA_CACHE["games"][appid_str] = {} + + # Clamp rating to 0-5 + clamped_rating = max(0, min(5, int(rating))) + _METADATA_CACHE["games"][appid_str]["rating"] = clamped_rating + _METADATA_CACHE["games"][appid_str]["last_modified"] = time.time() + _persist_metadata() + + +def set_game_favorite(appid: int, is_favorite: bool) -> None: + """Mark a game as favorite or not.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + appid_str = str(appid) + if appid_str not in _METADATA_CACHE["games"]: + _METADATA_CACHE["games"][appid_str] = {} + + _METADATA_CACHE["games"][appid_str]["favorite"] = bool(is_favorite) + _METADATA_CACHE["games"][appid_str]["last_modified"] = time.time() + _persist_metadata() + + +def get_game_metadata(appid: int) -> Dict[str, Any]: + """Get metadata for a specific game.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + appid_str = str(appid) + if appid_str in _METADATA_CACHE["games"]: + return _METADATA_CACHE["games"][appid_str].copy() + + return { + "name": "", + "tags": [], + "notes": "", + "rating": 0, + "favorite": False, + } + + +def get_all_game_metadata() -> Dict[str, Dict[str, Any]]: + """Get metadata for all games.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + return {k: v.copy() for k, v in _METADATA_CACHE["games"].items()} + + +def get_favorite_games() -> List[Dict[str, Any]]: + """Get all favorite games.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + favorites = [] + for appid_str, metadata in _METADATA_CACHE["games"].items(): + if metadata.get("favorite", False): + favorites.append({ + "appid": int(appid_str), + "name": metadata.get("name", ""), + **metadata, + }) + return sorted(favorites, key=lambda x: 
x.get("last_modified", 0), reverse=True) + + +def is_game_favorite(appid: int) -> bool: + """Check if a specific game is marked as favorite.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + appid_str = str(appid) + game = _METADATA_CACHE["games"].get(appid_str, {}) + return game.get("favorite", False) + + +def get_games_by_tag(tag: str) -> List[Dict[str, Any]]: + """Get all games with a specific tag.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + games = [] + for appid_str, metadata in _METADATA_CACHE["games"].items(): + if tag in metadata.get("tags", []): + games.append({ + "appid": int(appid_str), + **metadata, + }) + return games + + +def search_games(query: str) -> List[Dict[str, Any]]: + """Search games by name, tags, or notes.""" + with METADATA_LOCK: + _ensure_metadata_initialized() + + query_lower = query.lower() + results = [] + + for appid_str, metadata in _METADATA_CACHE["games"].items(): + name = metadata.get("name", "").lower() + notes = metadata.get("notes", "").lower() + tags = [tag.lower() for tag in metadata.get("tags", [])] + + if (query_lower in name or + query_lower in notes or + any(query_lower in tag for tag in tags)): + results.append({ + "appid": int(appid_str), + **metadata, + }) + + return results + + +def get_metadata_json(appid: Optional[int] = None) -> str: + """Get metadata as JSON.""" + if appid is not None: + metadata = get_game_metadata(appid) + return json.dumps({"success": True, "metadata": metadata}) + else: + all_metadata = get_all_game_metadata() + return json.dumps({"success": True, "metadata": all_metadata}) diff --git a/backend/main.py b/backend/main.py index 35cda3b..7b997da 100644 --- a/backend/main.py +++ b/backend/main.py @@ -4,7 +4,7 @@ import sys import webbrowser -from typing import Any +from typing import Any, List import Millennium # type: ignore import PluginUtils # type: ignore @@ -15,13 +15,39 @@ init_apis as api_init_apis, store_last_message, ) +from api_monitor import ( + get_all_api_statuses, + get_monitor_json, + is_api_available, + record_api_request, +) +from activity_tracker import ( + cancel_operation, + complete_operation, + get_dashboard_json, + get_operation_history, + start_operation, + update_operation, +) from auto_update import ( apply_pending_update_if_any, check_for_updates_now as auto_check_for_updates_now, restart_steam as auto_restart_steam, start_auto_update_background_check, ) +from bandwidth_limiter import ( + disable_throttling, + enable_throttling, + get_bandwidth_settings, + set_bandwidth_limit, +) from config import WEBKIT_DIR_NAME, WEB_UI_ICON_FILE, WEB_UI_JS_FILE +from download_history import ( + get_download_history_json, + get_download_statistics, + record_download_complete, + record_download_start, +) from downloads import ( cancel_add_via_luatools, delete_luatools_for_app, @@ -32,6 +58,12 @@ read_loaded_apps, start_add_via_luatools, ) +from fix_conflicts import ( + check_for_conflicts, + get_conflict_json, + record_fix_applied, + record_fix_removed, +) from fixes import ( apply_game_fix, cancel_apply_fix, @@ -40,6 +72,38 @@ get_unfix_status, unfix_game, ) +from game_metadata import ( + add_or_update_game, + get_all_game_metadata, + get_favorite_games, + get_game_metadata, + get_games_by_tag, + get_metadata_json, + search_games, + set_game_favorite, + set_game_notes, + set_game_rating, + set_game_tags, +) +from script_dependencies import ( + check_for_circular_dependencies, + check_for_missing_dependencies, + detect_script_conflicts, + get_all_dependencies, + 
get_dependencies_json, + register_script, + resolve_installation_order, +) +from statistics import ( + get_statistics, + get_statistics_json, + record_api_fetch, + record_download, + record_fix_applied as stats_record_fix_applied, + record_fix_removed as stats_record_fix_removed, + record_mod_installed, + record_mod_removed, +) from utils import ensure_temp_download_dir from http_client import close_http_client, ensure_http_client from logger import logger as shared_logger @@ -354,6 +418,202 @@ def GetTranslations(contentScriptQuery: str = "", language: str = "", **kwargs: return json.dumps({"success": False, "error": str(exc)}) +# ============================================================================ +# NEW FEATURE API ENDPOINTS (Phase 1: Foundation Features) +# ============================================================================ + +def GetStatistics(contentScriptQuery: str = "") -> str: + """Get plugin statistics.""" + try: + return get_statistics_json() + except Exception as exc: + logger.warn(f"LuaTools: GetStatistics failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def GetDownloadHistory(limit: int = 50, contentScriptQuery: str = "") -> str: + """Get download history.""" + try: + return get_download_history_json(limit) + except Exception as exc: + logger.warn(f"LuaTools: GetDownloadHistory failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def GetGameMetadata(appid: int = 0, contentScriptQuery: str = "") -> str: + """Get game metadata.""" + try: + if appid > 0: + return get_metadata_json(appid) + else: + return get_metadata_json(None) + except Exception as exc: + logger.warn(f"LuaTools: GetGameMetadata failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def SetGameMetadata(appid: int, app_name: str = "", contentScriptQuery: str = "") -> str: + """Set or update game metadata.""" + try: + add_or_update_game(appid, app_name) + return json.dumps({"success": True, "message": f"Game metadata updated for appid {appid}"}) + except Exception as exc: + logger.warn(f"LuaTools: SetGameMetadata failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def SetGameTags(appid: int, tags: List[str] = None, contentScriptQuery: str = "") -> str: + """Set tags for a game.""" + try: + if tags is None: + tags = [] + set_game_tags(appid, tags) + return json.dumps({"success": True, "message": f"Tags set for appid {appid}"}) + except Exception as exc: + logger.warn(f"LuaTools: SetGameTags failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def SetGameNotes(appid: int, notes: str = "", contentScriptQuery: str = "") -> str: + """Set notes for a game.""" + try: + set_game_notes(appid, notes) + return json.dumps({"success": True, "message": f"Notes set for appid {appid}"}) + except Exception as exc: + logger.warn(f"LuaTools: SetGameNotes failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def SetGameRating(appid: int, rating: int = 0, contentScriptQuery: str = "") -> str: + """Set rating for a game (0-5).""" + try: + set_game_rating(appid, rating) + return json.dumps({"success": True, "message": f"Rating set for appid {appid}"}) + except Exception as exc: + logger.warn(f"LuaTools: SetGameRating failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def SetGameFavorite(appid: int, is_favorite: bool = False, contentScriptQuery: str = "") -> str: + """Mark a game as favorite.""" + try: + set_game_favorite(appid, 
is_favorite) + return json.dumps({"success": True, "message": f"Favorite status updated for appid {appid}"}) + except Exception as exc: + logger.warn(f"LuaTools: SetGameFavorite failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def GetFavoriteGames(contentScriptQuery: str = "") -> str: + """Get all favorite games.""" + try: + favorites = get_favorite_games() + return json.dumps({"success": True, "games": favorites}) + except Exception as exc: + logger.warn(f"LuaTools: GetFavoriteGames failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def IsGameFavorite(appid: int, contentScriptQuery: str = "") -> str: + """Check if a game is marked as favorite.""" + try: + is_fav = is_game_favorite(appid) + return json.dumps({"success": True, "isFavorite": is_fav}) + except Exception as exc: + logger.warn(f"LuaTools: IsGameFavorite failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def SearchGames(query: str = "", contentScriptQuery: str = "") -> str: + """Search games by name, tags, or notes.""" + try: + results = search_games(query) + return json.dumps({"success": True, "results": results}) + except Exception as exc: + logger.warn(f"LuaTools: SearchGames failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def GetAPIMonitor(contentScriptQuery: str = "") -> str: + """Get API monitoring statistics.""" + try: + return get_monitor_json() + except Exception as exc: + logger.warn(f"LuaTools: GetAPIMonitor failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def CheckFixConflicts(appid: int, fix_type: str = "generic", contentScriptQuery: str = "") -> str: + """Check for fix conflicts before applying.""" + try: + result = check_for_conflicts(appid, fix_type) + return json.dumps({"success": True, **result}) + except Exception as exc: + logger.warn(f"LuaTools: CheckFixConflicts failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def GetScriptDependencies(script_id: str, contentScriptQuery: str = "") -> str: + """Get script dependency information.""" + try: + return get_dependencies_json(script_id) + except Exception as exc: + logger.warn(f"LuaTools: GetScriptDependencies failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def RegisterScript(script_id: str, name: str = "", version: str = "", dependencies: List[str] = None, contentScriptQuery: str = "") -> str: + """Register a script with its dependencies.""" + try: + if dependencies is None: + dependencies = [] + register_script(script_id, name, version, dependencies) + return json.dumps({"success": True, "message": f"Script {script_id} registered"}) + except Exception as exc: + logger.warn(f"LuaTools: RegisterScript failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def GetActivityDashboard(contentScriptQuery: str = "") -> str: + """Get real-time activity dashboard data.""" + try: + return get_dashboard_json() + except Exception as exc: + logger.warn(f"LuaTools: GetActivityDashboard failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def GetBandwidthSettings(contentScriptQuery: str = "") -> str: + """Get current bandwidth limiting settings.""" + try: + settings = get_bandwidth_settings() + return json.dumps({"success": True, "settings": settings}) + except Exception as exc: + logger.warn(f"LuaTools: GetBandwidthSettings failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def 
SetBandwidthLimit(max_bytes_per_second: int, contentScriptQuery: str = "") -> str: + """Set bandwidth limit for downloads.""" + try: + set_bandwidth_limit(max_bytes_per_second) + enable_throttling(max_bytes_per_second) + return json.dumps({"success": True, "message": f"Bandwidth limit set to {max_bytes_per_second} bytes/sec"}) + except Exception as exc: + logger.warn(f"LuaTools: SetBandwidthLimit failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + +def DisableBandwidthLimit(contentScriptQuery: str = "") -> str: + """Disable bandwidth limiting.""" + try: + disable_throttling() + return json.dumps({"success": True, "message": "Bandwidth limiting disabled"}) + except Exception as exc: + logger.warn(f"LuaTools: DisableBandwidthLimit failed: {exc}") + return json.dumps({"success": False, "error": str(exc)}) + + class Plugin: def _front_end_loaded(self): _copy_webkit_files() @@ -399,3 +659,4 @@ def _unload(self): plugin = Plugin() + diff --git a/backend/script_dependencies.py b/backend/script_dependencies.py new file mode 100644 index 0000000..7f78a04 --- /dev/null +++ b/backend/script_dependencies.py @@ -0,0 +1,284 @@ +"""Script dependency resolver for Lua script management.""" + +from __future__ import annotations + +import json +import os +import re +import threading +from typing import Any, Dict, List, Optional, Set + +from logger import logger +from paths import backend_path +from utils import read_json, write_json + +DEPENDENCIES_FILE = "script_dependencies.json" +DEPS_LOCK = threading.Lock() + +# In-memory cache +_DEPS_CACHE: Dict[str, Any] = {} +_CACHE_INITIALIZED = False + + +def _get_deps_path() -> str: + return backend_path(DEPENDENCIES_FILE) + + +def _ensure_deps_initialized() -> None: + """Initialize dependencies file if not exists.""" + global _DEPS_CACHE, _CACHE_INITIALIZED + + if _CACHE_INITIALIZED and _DEPS_CACHE: + return + + path = _get_deps_path() + if os.path.exists(path): + try: + _DEPS_CACHE = read_json(path) + _CACHE_INITIALIZED = True + return + except Exception as exc: + logger.warn(f"LuaTools: Failed to load script dependencies: {exc}") + + # Create default structure + _DEPS_CACHE = { + "version": 1, + "scripts": {}, # script_id: {name, version, dependencies: [], required_by: []} + } + _persist_deps() + _CACHE_INITIALIZED = True + + +def _persist_deps() -> None: + """Write dependencies to disk.""" + try: + path = _get_deps_path() + write_json(path, _DEPS_CACHE) + except Exception as exc: + logger.warn(f"LuaTools: Failed to persist script dependencies: {exc}") + + +def register_script(script_id: str, name: str = "", version: str = "", dependencies: Optional[List[str]] = None) -> None: + """Register a script and its dependencies.""" + with DEPS_LOCK: + _ensure_deps_initialized() + + dependencies = dependencies or [] + _DEPS_CACHE["scripts"][script_id] = { + "name": name, + "version": version, + "dependencies": list(set(dependencies)), # Remove duplicates + "required_by": [], + } + + # Update reverse dependencies + for dep_id in dependencies: + if dep_id not in _DEPS_CACHE["scripts"]: + _DEPS_CACHE["scripts"][dep_id] = { + "name": "", + "version": "", + "dependencies": [], + "required_by": [], + } + if script_id not in _DEPS_CACHE["scripts"][dep_id]["required_by"]: + _DEPS_CACHE["scripts"][dep_id]["required_by"].append(script_id) + + _persist_deps() + logger.log(f"LuaTools: Registered script {script_id} with {len(dependencies)} dependencies") + + +def get_script_dependencies(script_id: str) -> List[str]: + """Get direct dependencies of a 
script.""" + with DEPS_LOCK: + _ensure_deps_initialized() + + if script_id in _DEPS_CACHE["scripts"]: + return _DEPS_CACHE["scripts"][script_id].get("dependencies", []) + return [] + + +def get_all_dependencies(script_id: str) -> Set[str]: + """Get all transitive dependencies of a script (recursive).""" + visited: Set[str] = set() + + def _traverse(script: str) -> None: + if script in visited: + return + visited.add(script) + + with DEPS_LOCK: + _ensure_deps_initialized() + if script in _DEPS_CACHE["scripts"]: + for dep in _DEPS_CACHE["scripts"][script].get("dependencies", []): + _traverse(dep) + + _traverse(script_id) + visited.discard(script_id) # Don't include the script itself + return visited + + +def check_for_missing_dependencies(script_id: str, installed_scripts: List[str]) -> Dict[str, Any]: + """Check if a script has missing dependencies.""" + all_deps = get_all_dependencies(script_id) + installed_set = set(installed_scripts) + missing = all_deps - installed_set + + return { + "script_id": script_id, + "all_dependencies": list(all_deps), + "installed_dependencies": list(all_deps & installed_set), + "missing_dependencies": list(missing), + "has_missing": len(missing) > 0, + } + + +def get_dependent_scripts(script_id: str) -> List[str]: + """Get all scripts that depend on this script.""" + with DEPS_LOCK: + _ensure_deps_initialized() + + if script_id in _DEPS_CACHE["scripts"]: + return _DEPS_CACHE["scripts"][script_id].get("required_by", []) + return [] + + +def check_for_circular_dependencies(script_id: str) -> Dict[str, Any]: + """Check if a script has circular dependencies.""" + visited: Set[str] = set() + path: List[str] = [] + + def _detect_cycle(script: str) -> Optional[List[str]]: + if script in visited: + if script in path: + # Found cycle + cycle_start = path.index(script) + return path[cycle_start:] + [script] + return None + + visited.add(script) + path.append(script) + + with DEPS_LOCK: + _ensure_deps_initialized() + if script in _DEPS_CACHE["scripts"]: + for dep in _DEPS_CACHE["scripts"][script].get("dependencies", []): + cycle = _detect_cycle(dep) + if cycle: + return cycle + + path.pop() + return None + + cycle = _detect_cycle(script_id) + + return { + "script_id": script_id, + "has_circular_dependency": cycle is not None, + "cycle": cycle, + } + + +def resolve_installation_order(script_ids: List[str]) -> Dict[str, Any]: + """Resolve the correct installation order for a group of scripts.""" + all_scripts = set(script_ids) + + # Add all transitive dependencies + all_needed: Set[str] = set() + for script in script_ids: + all_needed.add(script) + all_needed.update(get_all_dependencies(script)) + + # Topological sort + ordered: List[str] = [] + visited: Set[str] = set() + + def _visit(script: str) -> bool: + if script in visited: + return True + + # Check for circular dependency + if not _check_circular(script, script, set()): + return False + + visited.add(script) + + # Visit dependencies first + with DEPS_LOCK: + _ensure_deps_initialized() + if script in _DEPS_CACHE["scripts"]: + for dep in _DEPS_CACHE["scripts"][script].get("dependencies", []): + if dep in all_needed: + if not _visit(dep): + return False + + ordered.append(script) + return True + + def _check_circular(current: str, target: str, path: Set[str]) -> bool: + """Check if there's a path from current to target (cycle detection).""" + with DEPS_LOCK: + _ensure_deps_initialized() + if current in path: + return False + path.add(current) + + if current in _DEPS_CACHE["scripts"]: + for dep in 
_DEPS_CACHE["scripts"][current].get("dependencies", []): + if dep == target: + return True + if _check_circular(dep, target, path.copy()): + return True + return False + + # Attempt to visit all scripts + for script in all_needed: + if not _visit(script): + return { + "success": False, + "error": f"Circular dependency detected involving {script}", + "installation_order": [], + "new_dependencies": [], + } + + new_dependencies = list(all_needed - set(script_ids)) + + return { + "success": True, + "installation_order": ordered, + "new_dependencies": new_dependencies, + "total_scripts": len(ordered), + "message": f"Install in this order: {' → '.join(ordered)}", + } + + +def detect_script_conflicts(script_ids: List[str]) -> List[Dict[str, Any]]: + """Detect known conflicts between scripts.""" + # This would be populated with community-reported conflicts + # For now, return empty list + conflicts = [] + + # Future: Load from conflicts database + # For now, just check for obvious issues + if len(script_ids) > 10: + conflicts.append({ + "severity": "warning", + "message": "Installing many scripts can impact performance", + "scripts": script_ids, + }) + + return conflicts + + +def get_dependencies_json(script_id: str) -> str: + """Get dependency information as JSON.""" + missing_check = check_for_missing_dependencies(script_id, []) + circular_check = check_for_circular_dependencies(script_id) + dependents = get_dependent_scripts(script_id) + + return json.dumps({ + "success": True, + "script_id": script_id, + "dependencies": missing_check, + "circular": circular_check, + "dependents": dependents, + }) diff --git a/backend/statistics.py b/backend/statistics.py new file mode 100644 index 0000000..61247c3 --- /dev/null +++ b/backend/statistics.py @@ -0,0 +1,200 @@ +"""Statistics tracking for LuaTools plugin.""" + +from __future__ import annotations + +import json +import os +import threading +import time +from typing import Any, Dict, List + +from logger import logger +from paths import backend_path +from utils import read_json, write_json + +STATS_FILE = "luatools_stats.json" +STATS_LOCK = threading.Lock() + +# In-memory cache +_STATS_CACHE: Dict[str, Any] = {} +_CACHE_INITIALIZED = False + + +def _get_stats_path() -> str: + return backend_path(STATS_FILE) + + +def _ensure_stats_initialized() -> None: + """Initialize stats file with default structure if not exists.""" + global _STATS_CACHE, _CACHE_INITIALIZED + + if _CACHE_INITIALIZED and _STATS_CACHE: + return + + path = _get_stats_path() + if os.path.exists(path): + _STATS_CACHE = read_json(path) + _CACHE_INITIALIZED = True + return + + # Create default stats structure + _STATS_CACHE = { + "version": 1, + "created_at": time.time(), + "last_updated": time.time(), + "total_mods_installed": 0, + "total_games_with_mods": 0, + "total_fixes_applied": 0, + "total_games_with_fixes": 0, + "total_downloads": 0, + "total_api_fetches": 0, + "games_with_mods": {}, # appid: {name, date_added, mod_count} + "games_with_fixes": {}, # appid: {name, date_added, fix_list} + "daily_stats": {}, # date: {downloads, fixes_applied, mods_added} + } + _persist_stats() + _CACHE_INITIALIZED = True + + +def _persist_stats() -> None: + """Write stats to disk.""" + try: + path = _get_stats_path() + _STATS_CACHE["last_updated"] = time.time() + write_json(path, _STATS_CACHE) + except Exception as exc: + logger.warn(f"LuaTools: Failed to persist stats: {exc}") + + +def record_mod_installed(appid: int, app_name: str = "") -> None: + """Record that a mod was installed for a game.""" + 
diff --git a/backend/statistics.py b/backend/statistics.py
new file mode 100644
index 0000000..61247c3
--- /dev/null
+++ b/backend/statistics.py
@@ -0,0 +1,200 @@
+"""Statistics tracking for the LuaTools plugin."""
+
+from __future__ import annotations
+
+import json
+import os
+import threading
+import time
+from typing import Any, Dict
+
+from logger import logger
+from paths import backend_path
+from utils import read_json, write_json
+
+STATS_FILE = "luatools_stats.json"
+STATS_LOCK = threading.Lock()
+
+# In-memory cache
+_STATS_CACHE: Dict[str, Any] = {}
+_CACHE_INITIALIZED = False
+
+
+def _get_stats_path() -> str:
+    return backend_path(STATS_FILE)
+
+
+def _ensure_stats_initialized() -> None:
+    """Initialize the stats file with a default structure if it does not exist."""
+    global _STATS_CACHE, _CACHE_INITIALIZED
+
+    if _CACHE_INITIALIZED and _STATS_CACHE:
+        return
+
+    path = _get_stats_path()
+    if os.path.exists(path):
+        _STATS_CACHE = read_json(path)
+        _CACHE_INITIALIZED = True
+        return
+
+    # Create default stats structure
+    _STATS_CACHE = {
+        "version": 1,
+        "created_at": time.time(),
+        "last_updated": time.time(),
+        "total_mods_installed": 0,
+        "total_games_with_mods": 0,
+        "total_fixes_applied": 0,
+        "total_games_with_fixes": 0,
+        "total_downloads": 0,
+        "total_api_fetches": 0,
+        "games_with_mods": {},   # appid: {name, date_added, mod_count}
+        "games_with_fixes": {},  # appid: {name, date_added, fix_list}
+        "daily_stats": {},       # date: {downloads, fixes_applied, mods_added}
+    }
+    _persist_stats()
+    _CACHE_INITIALIZED = True
+
+
+def _persist_stats() -> None:
+    """Write stats to disk."""
+    try:
+        path = _get_stats_path()
+        _STATS_CACHE["last_updated"] = time.time()
+        write_json(path, _STATS_CACHE)
+    except Exception as exc:
+        logger.warn(f"LuaTools: Failed to persist stats: {exc}")
+
+
+def record_mod_installed(appid: int, app_name: str = "") -> None:
+    """Record that a mod was installed for a game."""
+    with STATS_LOCK:
+        _ensure_stats_initialized()
+        _STATS_CACHE["total_mods_installed"] = _STATS_CACHE.get("total_mods_installed", 0) + 1
+
+        # Keys in games_with_mods are strings, so check str(appid); checking the
+        # raw int never matches and would inflate total_games_with_mods.
+        if str(appid) not in _STATS_CACHE["games_with_mods"]:
+            _STATS_CACHE["total_games_with_mods"] = _STATS_CACHE.get("total_games_with_mods", 0) + 1
+            _STATS_CACHE["games_with_mods"][str(appid)] = {
+                "name": app_name,
+                "date_added": time.time(),
+                "mod_count": 0,
+            }
+
+        game_entry = _STATS_CACHE["games_with_mods"].get(str(appid), {})
+        game_entry["mod_count"] = game_entry.get("mod_count", 0) + 1
+        _STATS_CACHE["games_with_mods"][str(appid)] = game_entry
+
+        _record_daily_stat("mods_added", 1)
+        _persist_stats()
+        logger.log(f"LuaTools: Recorded mod installation for appid {appid}")
+
+
+def record_mod_removed(appid: int) -> None:
+    """Record that a mod was removed from a game."""
+    with STATS_LOCK:
+        _ensure_stats_initialized()
+        if str(appid) in _STATS_CACHE["games_with_mods"]:
+            game_entry = _STATS_CACHE["games_with_mods"][str(appid)]
+            mod_count = game_entry.get("mod_count", 1)
+            if mod_count > 1:
+                game_entry["mod_count"] = mod_count - 1
+            else:
+                del _STATS_CACHE["games_with_mods"][str(appid)]
+                _STATS_CACHE["total_games_with_mods"] = max(0, _STATS_CACHE.get("total_games_with_mods", 1) - 1)
+            _persist_stats()
+
+
+def record_fix_applied(appid: int, app_name: str = "", fix_type: str = "") -> None:
+    """Record that a fix was applied to a game."""
+    with STATS_LOCK:
+        _ensure_stats_initialized()
+        _STATS_CACHE["total_fixes_applied"] = _STATS_CACHE.get("total_fixes_applied", 0) + 1
+
+        # Same string-key caveat as in record_mod_installed.
+        if str(appid) not in _STATS_CACHE["games_with_fixes"]:
+            _STATS_CACHE["total_games_with_fixes"] = _STATS_CACHE.get("total_games_with_fixes", 0) + 1
+            _STATS_CACHE["games_with_fixes"][str(appid)] = {
+                "name": app_name,
+                "date_added": time.time(),
+                "fix_list": [],
+            }
+
+        game_entry = _STATS_CACHE["games_with_fixes"].get(str(appid), {})
+        fix_entry = {
+            "type": fix_type,
+            "date_applied": time.time(),
+        }
+        if "fix_list" not in game_entry:
+            game_entry["fix_list"] = []
+        game_entry["fix_list"].append(fix_entry)
+        _STATS_CACHE["games_with_fixes"][str(appid)] = game_entry
+
+        _record_daily_stat("fixes_applied", 1)
+        _persist_stats()
+        logger.log(f"LuaTools: Recorded fix application for appid {appid}")
+
+
+def record_fix_removed(appid: int) -> None:
+    """Record that a fix was removed from a game."""
+    with STATS_LOCK:
+        _ensure_stats_initialized()
+        if str(appid) in _STATS_CACHE["games_with_fixes"]:
+            game_entry = _STATS_CACHE["games_with_fixes"][str(appid)]
+            if "fix_list" in game_entry and game_entry["fix_list"]:
+                game_entry["fix_list"].pop()
+                if not game_entry["fix_list"]:
+                    del _STATS_CACHE["games_with_fixes"][str(appid)]
+                    _STATS_CACHE["total_games_with_fixes"] = max(0, _STATS_CACHE.get("total_games_with_fixes", 1) - 1)
+            _persist_stats()
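+
+
+# For reference, the persisted luatools_stats.json looks roughly like this
+# (illustrative values; "440" is a hypothetical appid entry):
+#
+#   {
+#     "version": 1,
+#     "total_mods_installed": 3,
+#     "games_with_mods": {
+#       "440": {"name": "Team Fortress 2", "date_added": 1732540000.0, "mod_count": 2}
+#     },
+#     "daily_stats": {"2025-11-25": {"downloads": 2, "mods_added": 1}}
+#   }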
+
+
+def record_download(file_size: int = 0, success: bool = True) -> None:
+    """Record a download event."""
+    with STATS_LOCK:
+        _ensure_stats_initialized()
+        _STATS_CACHE["total_downloads"] = _STATS_CACHE.get("total_downloads", 0) + 1
+        _record_daily_stat("downloads", 1)
+        if file_size > 0:
+            _STATS_CACHE["total_bytes_downloaded"] = _STATS_CACHE.get("total_bytes_downloaded", 0) + file_size
+        _persist_stats()
+
+
+def record_api_fetch(success: bool = True) -> None:
+    """Record an API fetch event."""
+    with STATS_LOCK:
+        _ensure_stats_initialized()
+        _STATS_CACHE["total_api_fetches"] = _STATS_CACHE.get("total_api_fetches", 0) + 1
+        _persist_stats()
+
+
+def _record_daily_stat(stat_name: str, value: int) -> None:
+    """Record a daily statistic. The caller must hold STATS_LOCK."""
+    today = time.strftime("%Y-%m-%d", time.localtime())
+    if today not in _STATS_CACHE["daily_stats"]:
+        _STATS_CACHE["daily_stats"][today] = {}
+    daily = _STATS_CACHE["daily_stats"][today]
+    daily[stat_name] = daily.get(stat_name, 0) + value
+
+
+def get_statistics() -> Dict[str, Any]:
+    """Return current statistics."""
+    with STATS_LOCK:
+        _ensure_stats_initialized()
+        return {
+            "total_mods_installed": _STATS_CACHE.get("total_mods_installed", 0),
+            "total_games_with_mods": _STATS_CACHE.get("total_games_with_mods", 0),
+            "total_fixes_applied": _STATS_CACHE.get("total_fixes_applied", 0),
+            "total_games_with_fixes": _STATS_CACHE.get("total_games_with_fixes", 0),
+            "total_downloads": _STATS_CACHE.get("total_downloads", 0),
+            "total_api_fetches": _STATS_CACHE.get("total_api_fetches", 0),
+            "total_bytes_downloaded": _STATS_CACHE.get("total_bytes_downloaded", 0),
+            "games_with_mods_count": len(_STATS_CACHE.get("games_with_mods", {})),
+            "games_with_fixes_count": len(_STATS_CACHE.get("games_with_fixes", {})),
+        }
+
+
+def get_statistics_json() -> str:
+    """Return statistics as a JSON string."""
+    stats = get_statistics()
+    stats["success"] = True
+    return json.dumps(stats)
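+
+
+# Minimal usage sketch: record a couple of events for a hypothetical appid and
+# read the aggregates back. Note that running this writes luatools_stats.json
+# to disk via _persist_stats().
+if __name__ == "__main__":
+    record_mod_installed(440, "Team Fortress 2")  # hypothetical example game
+    record_download(file_size=1024 * 1024)
+    print(get_statistics_json())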
diff --git a/public/luatools.js b/public/luatools.js
index dbc61c1..3605434 100644
--- a/public/luatools.js
+++ b/public/luatools.js
@@ -172,6 +172,12 @@
     removeBtn.style.display = 'none';

     const fixesMenuBtn = createMenuButton('lt-settings-fixes-menu', 'menu.fixesMenu', 'Fixes Menu', 'fa-wrench');
+
+    const favoritesBtn = createMenuButton('lt-settings-favorites', 'menu.favorites', 'Favorite Games', 'fa-star');
+
+    const searchBtn = createMenuButton('lt-settings-search', 'menu.search', 'Search Games', 'fa-magnifying-glass');
+
+    const activityBtn = createMenuButton('lt-settings-activity', 'menu.activity', 'Activity Monitor', 'fa-chart-line');

     createSectionLabel('menu.advancedLabel', 'Advanced');
     const checkBtn = createMenuButton('lt-settings-check', 'menu.checkForUpdates', 'Check For Updates', 'fa-cloud-arrow-down');
@@ -291,6 +297,30 @@
       });
     }

+    if (favoritesBtn) {
+      favoritesBtn.addEventListener('click', function(e){
+        e.preventDefault();
+        try { overlay.remove(); } catch(_) {}
+        showFavoritesPanel();
+      });
+    }
+
+    if (searchBtn) {
+      searchBtn.addEventListener('click', function(e){
+        e.preventDefault();
+        try { overlay.remove(); } catch(_) {}
+        showSearchAndFilterUI();
+      });
+    }
+
+    if (activityBtn) {
+      activityBtn.addEventListener('click', function(e){
+        e.preventDefault();
+        try { overlay.remove(); } catch(_) {}
+        showActivityDashboard();
+      });
+    }
+
     try {
       const match = window.location.href.match(/https:\/\/store\.steampowered\.com\/app\/(\d+)/) || window.location.href.match(/https:\/\/steamcommunity\.com\/app\/(\d+)/);
       const appid = match ? parseInt(match[1], 10) : (window.__LuaToolsCurrentAppId || NaN);
@@ -868,7 +898,55 @@
       backendLog('LuaTools: Applying fix ' + fixType + ' for appid ' + appid);

-      // Start the download and extraction process
+      // Check for conflicts first; apply the fix only after the user confirms,
+      // or immediately when no conflicts are reported.
+      try {
+        Millennium.callServerMethod('luatools', 'CheckFixConflicts', {
+          appid: appid,
+          fix_type: fixType,
+          contentScriptQuery: ''
+        }).then(function(conflictRes){
+          try {
+            const conflictPayload = typeof conflictRes === 'string' ? JSON.parse(conflictRes) : conflictRes;
+
+            if (conflictPayload && conflictPayload.success && conflictPayload.conflicts && conflictPayload.conflicts.length > 0) {
+              // Show conflict warning
+              const conflictMsg = lt('Potential conflicts detected:') + '\n' + conflictPayload.conflicts.join('\n') + '\n\n' + lt('Continue anyway?');
+              showLuaToolsConfirm('LuaTools', conflictMsg,
+                function() {
+                  // User confirmed - proceed with fix
+                  startFixApplication(appid, downloadUrl, fixType, gameName);
+                },
+                function() {
+                  // User cancelled
+                  backendLog('LuaTools: User cancelled fix due to conflicts');
+                }
+              );
+              return;
+            }
+
+            // No conflicts - proceed
+            startFixApplication(appid, downloadUrl, fixType, gameName);
+          } catch(err) {
+            backendLog('LuaTools: CheckFixConflicts parse error: ' + err);
+            startFixApplication(appid, downloadUrl, fixType, gameName);
+          }
+        }).catch(function(err){
+          backendLog('LuaTools: CheckFixConflicts error: ' + err);
+          // Fail open: proceed with the fix if the conflict check itself errors
+          startFixApplication(appid, downloadUrl, fixType, gameName);
+        });
+      } catch(err) {
+        backendLog('LuaTools: Conflict check failed: ' + err);
+        startFixApplication(appid, downloadUrl, fixType, gameName);
+      }
+    } catch(err) {
+      backendLog('LuaTools: applyFix error: ' + err);
+    }
+  }
+
+  function startFixApplication(appid, downloadUrl, fixType, gameName) {
+    // Start the download and extraction process
+    try {
       Millennium.callServerMethod('luatools', 'ApplyGameFix', {
         appid: appid,
         downloadUrl: downloadUrl,
@@ -898,7 +976,9 @@
         ShowLuaToolsAlert('LuaTools', msg);
       });
     } catch(err) {
-      backendLog('LuaTools: applyFix error: ' + err);
+      backendLog('LuaTools: startFixApplication error: ' + err);
+      const msg = lt('Error applying fix');
+      ShowLuaToolsAlert('LuaTools', msg);
     }
   }
@@ -2075,6 +2155,36 @@
             restartBtn.after(iconBtn);
             window.__LuaToolsIconInserted = true;
             backendLog('Inserted Icon button');
+
+            // Add Statistics button right after the icon button
+            try {
+              if (!document.querySelector('.luatools-stats-button') && !window.__LuaToolsStatsInserted) {
+                const statsBtn = document.createElement('a');
+                if (referenceBtn && referenceBtn.className) {
+                  statsBtn.className = referenceBtn.className + ' luatools-stats-button';
+                } else {
+                  statsBtn.className = 'btnv6_blue_hoverfade btn_medium luatools-stats-button';
+                }
+                statsBtn.href = '#';
+                statsBtn.title = 'LuaTools Statistics';
+                statsBtn.setAttribute('data-tooltip-text', 'LuaTools Statistics');
+                // Normalize margins to match the reference button
+                try {
+                  if (referenceBtn) {
+                    const cs = window.getComputedStyle(referenceBtn);
+                    statsBtn.style.marginLeft = cs.marginLeft;
+                    statsBtn.style.marginRight = cs.marginRight;
+                  }
+                } catch(_) {}
+                const sspan = document.createElement('span');
+                sspan.textContent = '📊 Stats';
+                statsBtn.appendChild(sspan);
+                statsBtn.addEventListener('click', function(e){ e.preventDefault(); backendLog('LuaTools stats button clicked'); showStatisticsDashboard(); });
+                iconBtn.after(statsBtn);
+                window.__LuaToolsStatsInserted = true;
+                backendLog('Inserted Statistics button');
+              }
+            } catch(err) { backendLog('Failed to insert stats button: ' + err); }
           }
         } catch(_) {}
         window.__LuaToolsRestartInserted = true;
@@ -2397,6 +2507,577 @@
   };

   // Use MutationObserver to catch dynamically added content
+  // Statistics Dashboard UI
+  function showStatisticsDashboard() {
+    if (document.querySelector('.luatools-stats-dashboard')) return;
+
+    ensureLuaToolsAnimations();
+    const dashboard = document.createElement('div');
+    dashboard.className = 'luatools-stats-dashboard';
+    dashboard.style.cssText = `
+      position: fixed;
+      top: 20px;
+      right: 20px;
+      width: 320px;
+      background: linear-gradient(135deg, #1b2838 0%, #2a475e 100%);
+      border: 2px solid #66c0f4;
+      border-radius: 8px;
+      padding: 16px;
+      z-index: 99998;
+      color: #fff;
+      font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
+      box-shadow: 0 20px 60px rgba(0,0,0,.8), 0 0 0 1px rgba(102,192,244,0.3);
+      animation: slideUp 0.3s ease-out;
+      max-height: 80vh;
+      overflow-y: auto;
+    `;
+
+    const title = document.createElement('div');
+    title.style.cssText = 'font-size: 18px; font-weight: 700; margin-bottom: 12px; color: #66c0f4; display: flex; justify-content: space-between; align-items: center;';
+    title.innerHTML = 'LuaTools Stats×';
+    dashboard.appendChild(title);
+
+    const content = document.createElement('div');
+    content.style.cssText = 'font-size: 13px; line-height: 1.6;';
+    content.innerHTML = '
[Remainder of the luatools.js hunk is truncated here; the patch continues with GIT binary patch data for backend/__pycache__/*.cpython-313.pyc files (including download_history.cpython-313.pyc), omitted as unreadable compiled bytecode.]