Tracking qBittorrent Usage With Graphs
#!/usr/bin/python
import sqlite3
import logging
from typing import Dict, List, Optional, Union, Any, Tuple
from PyQt5.QtCore import QSettings, QVariant, QTimer, QCoreApplication
import sys
import signal
from datetime import datetime, timedelta
import os
import notify2
import argparse


class Config:
    """Configuration management class."""
    # File paths
    DEFAULT_CONFIG_FILE = os.path.expanduser('~/.config/qBittorrent/qBittorrent-data.conf')
    DEFAULT_DB_FILE = os.path.expanduser('~/qBittorrentStats.db')
    # Notification settings
    NOTIFICATION_TITLE = "qBittorrent Stats Update"
    NOTIFICATION_APP_NAME = "qBittorrent Stats"
    UPDATE_INTERVAL_MS = 3600000  # 1 hour in milliseconds
    # Formatting settings
    TIB_PRECISION = 3
    GIB_PRECISION = 2
    MIB_PRECISION = 2
    RATIO_PRECISION = 4


def setup_database(db_path: str) -> sqlite3.Connection:
    """Setup and return database connection."""
    conn = sqlite3.connect(db_path)
    c = conn.cursor()
    # Create the stats table if it doesn't exist
    c.execute('''
        CREATE TABLE IF NOT EXISTS torrent_stats (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp INTEGER NOT NULL,
            all_time_download_bytes INTEGER NOT NULL,
            all_time_upload_bytes INTEGER NOT NULL,
            share_ratio REAL,
            required_upload_bytes INTEGER
        )
    ''')
    # Create index for faster querying if it doesn't exist
    c.execute('CREATE INDEX IF NOT EXISTS idx_timestamp ON torrent_stats(timestamp)')
    conn.commit()
    return conn


def setup_logging() -> None:
    """Configure logging for the application."""
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s',
        handlers=[
            logging.StreamHandler(),
            logging.FileHandler(os.path.expanduser('~/qbittorrent_notify.log'))
        ]
    )


def bytes_to_tebibytes(bytes_value: int) -> str:
    """Convert bytes to tebibytes string representation."""
    tib_value = bytes_value / (1024 ** 4)
    return f'{tib_value:.{Config.TIB_PRECISION}f} TiB'


def bytes_to_gibibytes(bytes_value: int) -> str:
    """Convert bytes to gibibytes string representation."""
    gib_value = bytes_value / (1024 ** 3)
    return f'{gib_value:.{Config.GIB_PRECISION}f} GiB'


def bytes_to_mebibytes(bytes_value: int) -> str:
    """Convert bytes to mebibytes string representation."""
    mib_value = bytes_value / (1024 ** 2)
    return f'{mib_value:.{Config.MIB_PRECISION}f} MiB'


def read_qbittorrent_data(file_path: str) -> Optional[Dict[str, Any]]:
    """Read qBittorrent statistics from the config file."""
    try:
        settings = QSettings(file_path, QSettings.IniFormat)
        settings.beginGroup('Stats')
        all_stats_variant = settings.value('AllStats')
        settings.endGroup()
        return all_stats_variant
    except Exception as e:
        logging.error(f"Failed to read qBittorrent data: {e}")
        return None


def calculate_stats(all_stats_variant: Dict[str, Any]) -> Tuple[int, int, Optional[float], Optional[int]]:
    """Calculate statistics from qBittorrent data."""
    download_bytes = all_stats_variant['AlltimeDL']
    upload_bytes = all_stats_variant['AlltimeUL']
    if download_bytes == 0:
        share_ratio = None
        required_upload = None
    else:
        share_ratio = upload_bytes / download_bytes
        desired_ratio = (int(share_ratio * 100) + 1) / 100.0
        required_upload = (desired_ratio * download_bytes) - upload_bytes
    return (download_bytes, upload_bytes, share_ratio, required_upload)


def save_stats_to_db(conn: sqlite3.Connection, stats: Tuple[int, int, Optional[float], Optional[int]]) -> None:
    """Save qBittorrent statistics to SQLite database."""
    try:
        c = conn.cursor()
        timestamp = int(datetime.now().timestamp())
        download_bytes, upload_bytes, share_ratio, required_upload = stats
        c.execute('''
            INSERT INTO torrent_stats
            (timestamp, all_time_download_bytes, all_time_upload_bytes, share_ratio, required_upload_bytes)
            VALUES (?, ?, ?, ?, ?)
        ''', (timestamp, download_bytes, upload_bytes, share_ratio, required_upload))
        conn.commit()
        logging.info("Statistics saved successfully to database")
    except Exception as e:
        logging.error(f"Failed to save statistics to database: {e}")
        conn.rollback()


def format_notification_message(stats: Tuple[int, int, Optional[float], Optional[int]]) -> str:
    """Format the notification message with current statistics."""
    download_bytes, upload_bytes, share_ratio, required_upload = stats
    all_time_download_tib = bytes_to_tebibytes(download_bytes)
    all_time_upload_tib = bytes_to_tebibytes(upload_bytes)
    if share_ratio is None:
        ratio_str = '∞ (No data downloaded)'
        additional_upload_str = 'N/A'
    else:
        ratio_str = f'{share_ratio:.{Config.RATIO_PRECISION}f}'
        additional_upload_str = bytes_to_mebibytes(required_upload)
    return (
        f'ALL-TIME DOWNLOAD: {all_time_download_tib}\n'
        f'ALL-TIME UPLOAD: {all_time_upload_tib}\n'
        f'RATIO: {ratio_str}\n'
        f'REQUIRED UPLOAD TO INCREASE RATIO: {additional_upload_str}'
    )


def send_notification_and_save(file_path: str, db_conn: sqlite3.Connection) -> None:
    """Send system notification and save statistics."""
    try:
        all_stats_variant = read_qbittorrent_data(file_path)
        if all_stats_variant is not None:
            stats = calculate_stats(all_stats_variant)
            save_stats_to_db(db_conn, stats)
            message = format_notification_message(stats)
        else:
            message = "Failed to read the data."
        notify2.init(Config.NOTIFICATION_APP_NAME)
        notification = notify2.Notification(Config.NOTIFICATION_TITLE, message)
        notification.show()
        logging.info("Notification sent successfully")
    except Exception as e:
        logging.error(f"Failed to send notification: {e}")


def parse_args() -> argparse.Namespace:
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(
        description="qBittorrent Stats Notification",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  %(prog)s                # Run in daemon mode with hourly updates
  %(prog)s --now          # Send notification immediately and exit
  %(prog)s --interval 30  # Run in daemon mode with updates every 30 minutes
"""
    )
    parser.add_argument("--now", action="store_true",
                        help="Send notification immediately and exit")
    parser.add_argument("--interval", type=int, default=60,
                        help="Update interval in minutes (default: 60)")
    parser.add_argument("--config", type=str,
                        default=Config.DEFAULT_CONFIG_FILE,
                        help="Path to qBittorrent config file")
    parser.add_argument("--db", type=str,
                        default=Config.DEFAULT_DB_FILE,
                        help="Path to SQLite database file")
    parser.add_argument("--debug", action="store_true",
                        help="Enable debug logging")
    return parser.parse_args()


def main() -> int:
    """Main application entry point."""
    args = parse_args()
    # Setup logging
    setup_logging()
    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    logging.info("Starting qBittorrent Stats Notification")
    # Setup database connection
    db_conn = setup_database(args.db)
    if args.now:
        send_notification_and_save(args.config, db_conn)
        db_conn.close()
        return 0
    app = QCoreApplication(sys.argv)
    update_interval = args.interval * 60 * 1000  # Convert minutes to milliseconds

    def schedule_notification():
        send_notification_and_save(args.config, db_conn)
        QTimer.singleShot(update_interval, notify_and_reschedule)

    def notify_and_reschedule():
        send_notification_and_save(args.config, db_conn)
        QTimer.singleShot(update_interval, notify_and_reschedule)

    # Schedule the first notification
    schedule_notification()

    # Handle graceful shutdown
    def signal_handler(*args):
        logging.info("Shutting down...")
        db_conn.close()
        app.quit()

    signal.signal(signal.SIGINT, signal_handler)
    return app.exec_()


if __name__ == '__main__':
    sys.exit(main())
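
The notification's "REQUIRED UPLOAD TO INCREASE RATIO" figure comes from calculate_stats() above: the current ratio is truncated to hundredths, bumped by 0.01, and the script reports how much more upload is needed to reach that target. Below is a minimal standalone sketch of the same arithmetic, with made-up byte counts purely for illustration. The second script, which follows, reads the same SQLite database to build the dashboard.

# Standalone illustration of the "required upload" arithmetic used in
# calculate_stats() above; the byte counts are made-up example values.
download_bytes = 2_000_000_000_000   # example: ~1.82 TiB downloaded
upload_bytes = 1_234_000_000_000     # example: ~1.12 TiB uploaded

share_ratio = upload_bytes / download_bytes            # 0.617
desired_ratio = (int(share_ratio * 100) + 1) / 100.0   # next hundredth: 0.62
required_upload = (desired_ratio * download_bytes) - upload_bytes

print(f"current ratio: {share_ratio:.4f}")
print(f"target ratio:  {desired_ratio:.2f}")
print(f"upload needed: {required_upload / (1024 ** 2):.2f} MiB")
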
#!/usr/bin/python
import sqlite3
import os
import argparse
from datetime import datetime, timedelta
from functools import lru_cache
import plotly.graph_objects as go
from jinja2 import Template
from collections import defaultdict
import re
import gzip
import base64
import json
import hashlib


class Config:
    """Configuration management class."""
    DEFAULT_DB_FILE = os.path.expanduser('~/qBittorrentStats.db')
    DEFAULT_HTML_FILE = os.path.expanduser('~/qBittorrentStats.html')
    # Time range options
    TIME_RANGES = {
        'week': 7,
        'month': 30,
        'quarter': 90,
        'year': 365,
        'all': None
    }
    # Chart colors
    DOWNLOAD_COLOR = '#3498db'
    UPLOAD_COLOR = '#2ecc71'
    RATIO_COLOR = '#e74c3c'
    # Chart settings
    MARKER_SIZE = 6
    LINE_WIDTH = 2
    # Font settings
    TITLE_FONT_SIZE = 22
    AXIS_FONT_SIZE = 14
    LEGEND_FONT_SIZE = 12
    FONT_FAMILY = "Roboto, Arial, sans-serif"
    # Cache settings
    TIMESTAMP_CACHE_SIZE = 128
    # Data conversion
    TIB_TO_GIB = 1024
    DAYS_PER_MONTH = 30.44


def parse_args():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(
        description='Generate qBittorrent statistics dashboard with visualizations for download/upload data and share ratios.',
        epilog='''
Examples:
  %(prog)s                                   # Generate dashboard with default settings
  %(prog)s -t week -d                        # Show last week's stats in dark mode
  %(prog)s -t month -o ~/monthly_stats.html  # Generate monthly stats to custom file
  %(prog)s -i ~/custom/stats.db              # Use custom input file
''',
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument('--input', '-i',
                        default=Config.DEFAULT_DB_FILE,
                        help='Path to the input SQLite database file (default: %(default)s)')
    parser.add_argument('--output', '-o',
                        default=Config.DEFAULT_HTML_FILE,
                        help='Path where the HTML dashboard will be generated (default: %(default)s)')
    parser.add_argument('--time-range', '-t',
                        choices=Config.TIME_RANGES.keys(),
                        default='all',
                        help='Time range to display in the dashboard:\n' +
                             '  week    - Last 7 days\n' +
                             '  month   - Last 30 days\n' +
                             '  quarter - Last 90 days\n' +
                             '  year    - Last 365 days\n' +
                             '  all     - All available data (default)')
    parser.add_argument('--dark-mode', '-d',
                        action='store_true',
                        help='Enable dark mode for better viewing in low-light conditions')
    return parser.parse_args()


@lru_cache(maxsize=Config.TIMESTAMP_CACHE_SIZE)
def parse_timestamp(timestamp):
    """Parse timestamp to datetime string with caching for repeated values."""
    return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d')


@lru_cache(maxsize=Config.TIMESTAMP_CACHE_SIZE)
def parse_timestamp_with_time(timestamp):
    """Parse timestamp to datetime string with time and caching."""
    return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')


def bytes_to_tebibytes(bytes_value):
    """Convert bytes to tebibytes."""
    return bytes_value / (1024 ** 4)


def get_data_from_db(db_path: str, time_range: str) -> list:
    """Get data from SQLite database with optional time range filter."""
    conn = sqlite3.connect(db_path)
    c = conn.cursor()
    if time_range != 'all' and Config.TIME_RANGES[time_range]:
        cutoff_timestamp = int((datetime.now() - timedelta(days=Config.TIME_RANGES[time_range])).timestamp())
        c.execute('''
            SELECT timestamp, all_time_download_bytes, all_time_upload_bytes, share_ratio, required_upload_bytes
            FROM torrent_stats
            WHERE timestamp >= ?
            ORDER BY timestamp
        ''', (cutoff_timestamp,))
    else:
        c.execute('''
            SELECT timestamp, all_time_download_bytes, all_time_upload_bytes, share_ratio, required_upload_bytes
            FROM torrent_stats
            ORDER BY timestamp
        ''')
    data = c.fetchall()
    conn.close()
    return data


def get_last_entry_per_day(data: list) -> list:
    """Extract the last entry for each day."""
    day_entries = defaultdict(list)
    # Group entries by day
    for entry in data:
        date_key = parse_timestamp(entry[0])  # timestamp is first column
        day_entries[date_key].append(entry)
    # Get the last entry for each day
    return [entries[-1] for entries in day_entries.values()]


def extract_data_points(data: list) -> tuple:
    """Extract data points from database entries."""
    timestamps = []
    dates = []
    downloads = []
    uploads = []
    ratios = []
    for entry in data:
        timestamp, download_bytes, upload_bytes, ratio, _ = entry
        timestamps.append(timestamp)
        dates.append(parse_timestamp_with_time(timestamp))
        downloads.append(bytes_to_tebibytes(download_bytes))
        uploads.append(bytes_to_tebibytes(upload_bytes))
        if ratio is not None:
            ratios.append(ratio)
        else:
            ratios.append(0)  # Use 0 for visualization when ratio is None
    return timestamps, dates, downloads, uploads, ratios


def create_figure(dates, downloads, uploads, ratios, title, dark_mode=False):
    """Create a plotly figure with the given data."""
    fig = go.Figure()
    # Add traces in a single batch
    traces = [
        go.Scatter(x=dates, y=downloads, mode='lines+markers', name='Download (TiB)',
                   line=dict(color=Config.DOWNLOAD_COLOR, width=Config.LINE_WIDTH),
                   marker=dict(size=Config.MARKER_SIZE)),
        go.Scatter(x=dates, y=uploads, mode='lines+markers', name='Upload (TiB)',
                   line=dict(color=Config.UPLOAD_COLOR, width=Config.LINE_WIDTH),
                   marker=dict(size=Config.MARKER_SIZE)),
        go.Scatter(x=dates, y=ratios, mode='lines+markers', name='Share Ratio',
                   line=dict(color=Config.RATIO_COLOR, width=Config.LINE_WIDTH),
                   marker=dict(size=Config.MARKER_SIZE), yaxis="y2")
    ]
    fig.add_traces(traces)
    fig.update_layout(
        title=dict(
            text=title,
            font=dict(family=Config.FONT_FAMILY, size=Config.TITLE_FONT_SIZE, color="#2c3e50")
        ),
        xaxis=dict(
            title=dict(
                text="Date",
                font=dict(family=Config.FONT_FAMILY, size=Config.AXIS_FONT_SIZE, color="#7f8c8d")
            ),
            gridcolor="#ecf0f1",
            showline=True,
            linecolor="#bdc3c7"
        ),
        yaxis=dict(
            title=dict(
                text="Data (TiB)",
                font=dict(family=Config.FONT_FAMILY, size=Config.AXIS_FONT_SIZE, color=Config.DOWNLOAD_COLOR)
            ),
            tickfont=dict(family=Config.FONT_FAMILY, color=Config.DOWNLOAD_COLOR),
            gridcolor="#ecf0f1",
            showline=True,
            linecolor="#bdc3c7"
        ),
        yaxis2=dict(
            title=dict(
                text="Share Ratio",
                font=dict(family=Config.FONT_FAMILY, size=Config.AXIS_FONT_SIZE, color=Config.RATIO_COLOR)
            ),
            tickfont=dict(family=Config.FONT_FAMILY, color=Config.RATIO_COLOR),
            showline=True,
            linecolor="#bdc3c7"
        ),
        template="plotly_white" if not dark_mode else "plotly_dark",
        legend=dict(
            orientation="h",
            yanchor="bottom",
            y=1.02,
            xanchor="center",
            x=0.5,
            font=dict(family=Config.FONT_FAMILY, size=Config.LEGEND_FONT_SIZE)
        ),
        margin=dict(l=60, r=60, t=80, b=60),
        hovermode="x unified",
        plot_bgcolor="#ffffff" if not dark_mode else "#1f2a33",
        paper_bgcolor="#ffffff" if not dark_mode else "#121a21"
    )
    # Set y-axis 2 properties after layout is created
    fig.update_layout(yaxis2_overlaying="y", yaxis2_side="right")
    # Add hover template for better tooltips
    for trace in fig.data:
        trace.update(hovertemplate='%{y:.3f}<extra>%{fullData.name}</extra>')
    return fig


def calculate_stats(data: list) -> dict:
    """Calculate statistics from the data."""
    if not data:
        return {}
    latest = data[-1]
    first = data[0]
    # Extract values from database records
    latest_download = bytes_to_tebibytes(latest[1])  # download_bytes is second column
    latest_upload = bytes_to_tebibytes(latest[2])    # upload_bytes is third column
    latest_ratio = latest[3] if latest[3] is not None else float('inf')  # ratio is fourth column
    first_download = bytes_to_tebibytes(first[1])
    first_upload = bytes_to_tebibytes(first[2])
    # Calculate days tracked
    days_tracked = (latest[0] - first[0]) // 86400  # timestamps are first column
    # Calculate months tracked (approximate)
    months_tracked = days_tracked / Config.DAYS_PER_MONTH
    stats = {
        "latest_download": f"{latest_download:.3f}",
        "latest_upload": f"{latest_upload:.3f}",
        "latest_ratio": f"{latest_ratio:.4f}" if latest_ratio != float('inf') else "∞",
        "latest_date": parse_timestamp_with_time(latest[0]),
        "first_date": parse_timestamp_with_time(first[0]),
        "days_tracked": days_tracked,
        "months_tracked": months_tracked
    }
    # Calculate average daily and monthly download/upload in GiB
    if days_tracked > 0:
        try:
            download_diff_tib = latest_download - first_download
            upload_diff_tib = latest_upload - first_upload
            # Convert TiB to GiB
            download_diff_gib = download_diff_tib * Config.TIB_TO_GIB
            upload_diff_gib = upload_diff_tib * Config.TIB_TO_GIB
            # Daily averages in GiB
            daily_download_gib = download_diff_gib / days_tracked
            daily_upload_gib = upload_diff_gib / days_tracked
            stats["avg_daily_download_gib"] = f"{daily_download_gib:.2f}"
            stats["avg_daily_upload_gib"] = f"{daily_upload_gib:.2f}"
            # Monthly averages in GiB (if we have enough data)
            if months_tracked >= 0.1:  # At least a few days of data
                monthly_download_gib = download_diff_gib / months_tracked
                monthly_upload_gib = upload_diff_gib / months_tracked
                stats["avg_monthly_download_gib"] = f"{monthly_download_gib:.2f}"
                stats["avg_monthly_upload_gib"] = f"{monthly_upload_gib:.2f}"
            else:
                stats["avg_monthly_download_gib"] = "N/A"
                stats["avg_monthly_upload_gib"] = "N/A"
        except (ValueError, TypeError):
            stats["avg_daily_download_gib"] = "N/A"
            stats["avg_daily_upload_gib"] = "N/A"
            stats["avg_monthly_download_gib"] = "N/A"
            stats["avg_monthly_upload_gib"] = "N/A"
    else:
        stats["avg_daily_download_gib"] = "N/A"
        stats["avg_daily_upload_gib"] = "N/A"
        stats["avg_monthly_download_gib"] = "N/A"
        stats["avg_monthly_upload_gib"] = "N/A"
    return stats


def compress_json_data(data):
    """Compress JSON data using gzip and base64 encode it."""
    json_str = json.dumps(data)
    compressed = gzip.compress(json_str.encode('utf-8'))
    return base64.b64encode(compressed).decode('utf-8')


def generate_html(db_path: str, output_file: str, time_range='all', dark_mode=False):
    """Generate HTML with integrated graphs using SQLite data."""
    # Get data from database
    data = get_data_from_db(db_path, time_range)
    if not data:
        raise ValueError(f"No data available for the selected time range: {time_range}")
    # Process data
    _, dates, downloads, uploads, ratios = extract_data_points(data)
    # Create the main figure
    title_suffix = f" ({time_range})" if time_range != 'all' else ""
    fig = create_figure(dates, downloads, uploads, ratios,
                        f"Download, Upload, and Share Ratio Trends{title_suffix}",
                        dark_mode=dark_mode)
    # Process last entries per day
    last_entries = get_last_entry_per_day(data)
    _, last_dates, last_downloads, last_uploads, last_ratios = extract_data_points(last_entries)
    # Create the daily summary figure
    fig_last = create_figure(last_dates, last_downloads, last_uploads, last_ratios,
                             f"Day Summary - Download, Upload, and Share Ratio{title_suffix}",
                             dark_mode=dark_mode)
    # Calculate statistics
    stats = calculate_stats(data)
    # Compress the plotly data
    fig_json_compressed = compress_json_data(fig.to_plotly_json())
    fig_last_json_compressed = compress_json_data(fig_last.to_plotly_json())
    # Generate cache busting hash
    cache_hash = hashlib.md5(str(datetime.now().timestamp()).encode()).hexdigest()[:8]

    template = Template("""<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>qBittorrent Stats Dashboard</title>
    <link rel="preconnect" href="https://fonts.googleapis.com">
    <link rel="preconnect" href="https://cdn.plot.ly">
    <script src="https://cdn.plot.ly/plotly-3.0.1.min.js" charset="utf-8"></script>
    <style>
        /* Critical CSS inlined for faster rendering */
        :root{--primary-color:#3498db;--secondary-color:#2ecc71;--accent-color:#e74c3c;--text-color:#2c3e50;--light-text:#7f8c8d;--background:#f9f9f9;--card-bg:#fff;--border-color:#ecf0f1}*{margin:0;padding:0;box-sizing:border-box}body{font-family:system-ui,-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,sans-serif;background-color:var(--background);color:var(--text-color);line-height:1.6;padding:0;margin:0}
    </style>
    <script>
        // Decompress base64 data
        function decompressData(compressedData) {
            try {
                const binaryString = atob(compressedData);
                const len = binaryString.length;
                const bytes = new Uint8Array(len);
                for (let i = 0; i < len; i++) {
                    bytes[i] = binaryString.charCodeAt(i);
                }
                // Gunzip the bytes, then use TextDecoder to turn them back into a JSON string
                const decompressed = new Zlib.Gunzip(bytes).decompress();
                const text = new TextDecoder().decode(decompressed);
                return JSON.parse(text);
            } catch (error) {
                console.error('Decompression error:', error);
                throw error;
            }
        }
        // Initialize charts when DOM is ready
        document.addEventListener('DOMContentLoaded', async function() {
            try {
                // Load required resources
                await Promise.all([
                    loadResource('https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500;700&display=swap&v={{ cache_hash }}', 'style'),
                    loadResource('https://cdn.jsdelivr.net/npm/[email protected]/bin/gunzip.min.js', 'script')
                ]);
                // Wait a bit for Zlib to initialize
                await new Promise(resolve => setTimeout(resolve, 100));
                // Decompress and create charts
                const integratedData = decompressData('{{ fig_json }}');
                const dailyData = decompressData('{{ fig_last_json }}');
                // Function to properly initialize a chart
                async function initializeChart(elementId, data, layout) {
                    const container = document.getElementById(elementId);
                    // Set initial size based on container
                    const updatedLayout = {
                        ...layout,
                        width: container.offsetWidth,
                        height: 500,
                        autosize: true
                    };
                    await Plotly.newPlot(elementId, data, updatedLayout, {
                        responsive: true,
                        useResizeHandler: true
                    });
                }
                // Initialize both charts
                await initializeChart('integrated-trend', integratedData.data, integratedData.layout);
                await initializeChart('daily-trend', dailyData.data, dailyData.layout);
                // Make charts responsive
                function updateCharts() {
                    ['integrated-trend', 'daily-trend'].forEach(id => {
                        const container = document.getElementById(id);
                        Plotly.relayout(id, {
                            width: container.offsetWidth,
                            'xaxis.autorange': true,
                            'yaxis.autorange': true,
                            'yaxis2.autorange': true
                        });
                    });
                }
                // Handle window resize
                window.addEventListener('resize', updateCharts);
                // Remove loading indicators and show charts
                document.querySelectorAll('.loading').forEach(el => el.remove());
                document.querySelectorAll('.chart-container').forEach(el => {
                    el.style.display = 'block';
                    el.style.height = '500px';
                });
                // Force an initial update after a small delay to ensure proper rendering
                setTimeout(updateCharts, 100);
            } catch (error) {
                console.error('Failed to initialize charts:', error);
                document.querySelectorAll('.loading').forEach(el => {
                    el.textContent = 'Failed to load chart. Please refresh the page. Error: ' + error.message;
                });
            }
        });
        // Helper function to load external resources
        function loadResource(url, type) {
            return new Promise((resolve, reject) => {
                let elem;
                if (type === 'style') {
                    elem = document.createElement('link');
                    elem.rel = 'stylesheet';
                    elem.href = url;
                } else {
                    elem = document.createElement('script');
                    elem.src = url;
                }
                elem.onload = resolve;
                elem.onerror = reject;
                document.head.appendChild(elem);
            });
        }
    </script>
</head>
<body>
    <header>
        <div class="container">
            <div>
                <h1>qBittorrent Statistics Dashboard</h1>
                <div class="timestamp">Last updated: {{ stats.latest_date }}</div>
            </div>
        </div>
    </header>
    <div class="container">
        <div class="stats-section">
            <h2>Total Statistics</h2>
            <div class="stats-grid">
                <div class="stat-card download-stat">
                    <div class="stat-label">Total Download</div>
                    <div class="stat-value">{{ stats.latest_download }} TiB</div>
                </div>
                <div class="stat-card upload-stat">
                    <div class="stat-label">Total Upload</div>
                    <div class="stat-value">{{ stats.latest_upload }} TiB</div>
                </div>
                <div class="stat-card ratio-stat">
                    <div class="stat-label">Share Ratio</div>
                    <div class="stat-value">{{ stats.latest_ratio }}</div>
                </div>
                <div class="stat-card">
                    <div class="stat-label">Days Tracked</div>
                    <div class="stat-value">{{ stats.days_tracked }}</div>
                    <div class="stat-note">Since {{ stats.first_date }}</div>
                </div>
            </div>
        </div>
        <div class="stats-section">
            <h2>Average Statistics</h2>
            <div class="stats-grid">
                <div class="stat-card download-stat">
                    <div class="stat-label">Avg. Daily Download</div>
                    <div class="stat-value">{{ stats.avg_daily_download_gib }} GiB</div>
                </div>
                <div class="stat-card upload-stat">
                    <div class="stat-label">Avg. Daily Upload</div>
                    <div class="stat-value">{{ stats.avg_daily_upload_gib }} GiB</div>
                </div>
                <div class="stat-card download-stat">
                    <div class="stat-label">Avg. Monthly Download</div>
                    <div class="stat-value">{{ stats.avg_monthly_download_gib }} GiB</div>
                </div>
                <div class="stat-card upload-stat">
                    <div class="stat-label">Avg. Monthly Upload</div>
                    <div class="stat-value">{{ stats.avg_monthly_upload_gib }} GiB</div>
                </div>
            </div>
        </div>
        <h2>All-Time Trends</h2>
        <div class="card">
            <div class="loading">Loading chart...</div>
            <div id="integrated-trend" class="chart-container" style="display: none;"></div>
        </div>
        <h2>Daily Summary</h2>
        <div class="card">
            <div class="loading">Loading chart...</div>
            <div id="daily-trend" class="chart-container" style="display: none;"></div>
        </div>
    </div>
    <footer>
        <div class="container">
            <p>Generated on {{ stats.latest_date }} • qBittorrent Statistics Dashboard</p>
        </div>
    </footer>
    <style>
        /* Non-critical CSS */
        .container{max-width:1200px;margin:0 auto;padding:20px}header{background-color:var(--primary-color);color:#fff;padding:20px 0;box-shadow:0 2px 10px rgba(0,0,0,.1)}header .container{display:flex;justify-content:space-between;align-items:center}h1{font-size:28px;font-weight:500;margin-bottom:10px}h2{font-size:22px;font-weight:500;margin:30px 0 15px;color:var(--primary-color);border-bottom:2px solid var(--border-color);padding-bottom:8px}.timestamp{font-size:14px;color:rgba(255,255,255,.8)}.card{background-color:var(--card-bg);border-radius:8px;box-shadow:0 2px 10px rgba(0,0,0,.05);padding:20px;margin-bottom:20px}.stats-grid{display:grid;grid-template-columns:repeat(auto-fill,minmax(250px,1fr));gap:20px;margin:20px 0}.stat-card{background-color:var(--card-bg);border-radius:8px;box-shadow:0 2px 10px rgba(0,0,0,.05);padding:20px;text-align:center;transition:transform .2s,box-shadow .2s}.stat-card:hover{transform:translateY(-5px);box-shadow:0 5px 15px rgba(0,0,0,.1)}.stat-value{font-size:24px;font-weight:700;margin:10px 0}.stat-label{font-size:14px;color:var(--light-text);text-transform:uppercase;letter-spacing:1px}.stat-note{font-size:12px;color:var(--light-text);margin-top:5px}.download-stat .stat-value{color:var(--primary-color)}.upload-stat .stat-value{color:var(--secondary-color)}.ratio-stat .stat-value{color:var(--accent-color)}.chart-container{height:500px;width:100%;margin:20px 0}.loading{text-align:center;padding:40px;color:var(--light-text);font-size:16px}footer{text-align:center;padding:20px;margin-top:40px;color:var(--light-text);font-size:14px;border-top:1px solid var(--border-color)}@media (max-width:768px){.stats-grid{grid-template-columns:1fr 1fr}header .container{flex-direction:column;text-align:center}.chart-container{height:400px}}@media (max-width:480px){.stats-grid{grid-template-columns:1fr}.chart-container{height:350px}}
    </style>
</body>
</html>""")
    # Generate HTML with compressed data and cache busting
    html_content = template.render(
        fig_json=fig_json_compressed,
        fig_last_json=fig_last_json_compressed,
        stats=stats,
        cache_hash=cache_hash
    )
    # Write as UTF-8 explicitly; the page contains non-ASCII characters ("∞", "•")
    with open(output_file, 'w', encoding='utf-8') as f:
        f.write(html_content)


def main():
    args = parse_args()
    try:
        generate_html(args.input, args.output, args.time_range, args.dark_mode)
        print(f"HTML file generated: {args.output}")
        if args.time_range != 'all':
            print(f"Time range: {args.time_range}")
        if args.dark_mode:
            print("Dark mode enabled")
    except Exception as e:
        print(f"Error: {e}")


if __name__ == "__main__":
    main()
Two Python scripts are used here: the first reads qBittorrent's AllStats counters, appends a row to a SQLite database (torrent_stats), and sends a desktop notification with the all-time download, upload, share ratio, and the upload still needed to raise the ratio; the second reads that database and generates a self-contained HTML dashboard with Plotly charts of the download, upload, and ratio trends.
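
Both scripts share the same SQLite file (~/qBittorrentStats.db by default), so the collected data can also be inspected directly, for example to check what the dashboard will render. A minimal sketch, assuming the collector has already written at least one row to torrent_stats:

# Print the most recent row the collector stored; assumes the default
# database path and at least one saved entry.
import sqlite3
import os
from datetime import datetime

db_path = os.path.expanduser('~/qBittorrentStats.db')
conn = sqlite3.connect(db_path)
row = conn.execute(
    'SELECT timestamp, all_time_download_bytes, all_time_upload_bytes, share_ratio '
    'FROM torrent_stats ORDER BY timestamp DESC LIMIT 1'
).fetchone()
conn.close()

if row is None:
    print('No stats recorded yet')
else:
    ts, dl, ul, ratio = row
    print(datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S'))
    print(f'Downloaded: {dl / 1024 ** 4:.3f} TiB')
    print(f'Uploaded:   {ul / 1024 ** 4:.3f} TiB')
    print(f'Ratio:      {ratio:.4f}' if ratio is not None else 'Ratio: ∞')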