diff --git a/helpers/cache.py b/helpers/cache.py
index 3ed5ef3..91848e8 100644
--- a/helpers/cache.py
+++ b/helpers/cache.py
@@ -1,10 +1,13 @@
 import time
 from helpers.db import db_get_videos
-# from helpers.thumbnails import generate_thumbnails_for_videos # optional
 
 # ───────── CACHE BUILDER ───────── #
-def build_cache():
-    videos = db_get_videos()
+def build_cache(start=None, end=None):
+    """
+    Build storage usage cache over an optional date range.
+    start/end are date or datetime; end is inclusive by calendar day.
+    """
+    videos = db_get_videos(start=start, end=end)
 
     grouped = {}
     for v in videos:
@@ -16,27 +19,24 @@ def build_cache():
     for (username, platform), vids in grouped.items():
         key = f"{username}::{platform}"
-        total_gb = 0
+        total_gb = 0.0
 
         for v in vids:
             try:
-                total_gb += float(v.get("size", 0) or 0) / 1024
-            except ValueError:
-                print(f"⚠️ Invalid size for video {v.get('video_id')}: {v.get('size')}")
+                total_gb += float(v.get("size", 0) or 0) / 1024.0
+            except (ValueError, TypeError):
+                # ignore bad rows
+                continue
 
         storage_usage[key] = {
             "total_size": total_gb,
             "video_count": len(vids)
         }
-        avg_sizes[key] = total_gb / len(vids) if vids else 0
-
+        avg_sizes[key] = (total_gb / len(vids)) if vids else 0.0
         video_map[key] = vids
 
-    # Thumbnail generation is optional, uncomment if you want it auto-built:
-    # generate_thumbnails_for_videos(videos)
-
     return {
         "timestamp": time.time(),
         "videos": video_map,
         "storage_usage": storage_usage,
         "avg_sizes": avg_sizes
-    }
\ No newline at end of file
+    }
diff --git a/helpers/db.py b/helpers/db.py
index 027505d..0e8c2a2 100644
--- a/helpers/db.py
+++ b/helpers/db.py
@@ -1,11 +1,16 @@
 import psycopg2.extras
-from config import get_local_db_connection # central config
+from datetime import datetime, timedelta
+from config import get_local_db_connection
 
 # ───────── DB HELPER ───────── #
-def db_get_videos(username: str = None):
+def db_get_videos(username: str = None, start=None, end=None):
+    """
+    Fetch videos, optionally filtered by username and created_at date range.
+    `start` / `end` can be date or datetime (UTC). End is inclusive by day.
+    """
     conn, cur = get_local_db_connection()
     cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
-
+
     query = """
         SELECT video_id, username, site AS platform,
         filepath, size, duration, gender,
@@ -19,6 +24,21 @@ def db_get_videos(username: str = None):
         query += " AND username = %s"
         params.append(username)
 
+    if start is not None:
+        # Normalize to datetime midnight if date
+        if hasattr(start, "hour") is False:
+            start = datetime.combine(start, datetime.min.time())
+        query += " AND created_at >= %s"
+        params.append(start)
+
+    if end is not None:
+        # Make end inclusive by bumping 1 day and using '<'
+        if hasattr(end, "hour") is False:
+            end = datetime.combine(end, datetime.min.time())
+        end_exclusive = end + timedelta(days=1)
+        query += " AND created_at < %s"
+        params.append(end_exclusive)
+
     query += " ORDER BY created_at DESC"
 
     cur.execute(query, params)
diff --git a/routes/web.py b/routes/web.py
index 30b973f..bc277b7 100644
--- a/routes/web.py
+++ b/routes/web.py
@@ -7,19 +7,52 @@ from config import VIDEOS_PER_PAGE, DASHBOARD_PER_PAGE
 
 web = Blueprint("web", __name__)
 
+from datetime import date, datetime, timedelta
+def _parse_dates(timeframe: str, start_str: str | None, end_str: str | None):
+    """Return (start, end) as date objects or (None, None). End inclusive by day."""
+    today = date.today()
+    if timeframe == "week":
+        # This week = last 7 days ending today
+        return (today - timedelta(days=6)), today
+    if timeframe == "month":
+        # Last 30 days
+        return (today - timedelta(days=29)), today
+    if timeframe == "year":
+        # Last 365 days
+        return (today - timedelta(days=364)), today
+    if timeframe == "custom":
+        try:
+            s = datetime.strptime(start_str, "%Y-%m-%d").date() if start_str else None
+            e = datetime.strptime(end_str, "%Y-%m-%d").date() if end_str else None
+            if s and e and s > e: s, e = e, s
+            return s, e
+        except ValueError:
+            return None, None
+    # "all" or unknown → no filter
+    return None, None
+
 @web.route("/")
 def dashboard():
-    cache = build_cache()
+    # ---- read filters ----
     query = request.args.get("q", "").lower().strip()
     sort = request.args.get("sort", "total_size")
     dir_ = request.args.get("dir", "desc")
+    timeframe = request.args.get("timeframe", "all")
+    start_str = request.args.get("start")
+    end_str = request.args.get("end")
     reverse = (dir_ == "desc")
 
+    start, end = _parse_dates(timeframe, start_str, end_str)
+
+    # ---- build cache over timeframe ----
+    cache = build_cache(start=start, end=end)
     items = list(cache["storage_usage"].items())
 
+    # ---- search ----
     if query:
         items = [e for e in items if query in e[0].split("::")[0].lower()]
 
+    # ---- sort ----
     def k_user(x): return x[0].split("::")[0].lower()
     def k_platform(x): return x[0].split("::")[1].lower()
     def k_total(x): return x[1]["total_size"]
@@ -35,10 +68,11 @@ def dashboard():
     }
     items.sort(key=key_map.get(sort, k_total), reverse=reverse)
 
+    # ---- paginate ----
     page = max(1, int(request.args.get("page", 1)))
     total_pages = max(1, math.ceil(len(items) / DASHBOARD_PER_PAGE))
-    start = (page - 1) * DASHBOARD_PER_PAGE
-    paginated = items[start:start + DASHBOARD_PER_PAGE]
+    start_idx = (page - 1) * DASHBOARD_PER_PAGE
+    paginated = items[start_idx:start_idx + DASHBOARD_PER_PAGE]
 
     return render_template(
         "main.html",
@@ -48,7 +82,10 @@ def dashboard():
         total_pages=total_pages,
         query=query,
         sort=sort,
-        dir=dir_
+        dir=dir_,
+        timeframe=timeframe,
+        start_date=start_str,
+        end_date=end_str
     )
 
 @web.route("/refresh")
diff --git a/templates/main.html b/templates/main.html
index d09f8f2..c49b5be 100644
--- a/templates/main.html
+++ b/templates/main.html
@@ -1,80 +1,148 @@
+ 📊 Video Storage Analytics + - + updateTable(); // Initial render +

📊 Video Storage Analytics

+
+ + + + + + + + + + + +
+ @@ -86,7 +154,7 @@ - + {% set next_user_dir = 'asc' if sort != 'user' or dir == 'desc' else 'desc' %} {% set next_platform_dir = 'asc' if sort != 'platform' or dir == 'desc' else 'desc' %} {% set next_total_dir = 'asc' if sort != 'total_size' or dir == 'desc' else 'desc' %} @@ -94,69 +162,69 @@ {% set next_avg_dir = 'asc' if sort != 'avg_size' or dir == 'desc' else 'desc' %} - + {% for key, stats in storage_usage %} - {% set user, platform = key.split("::") %} - - - - - - - + {% set user, platform = key.split("::") %} + + + + + + + {% endfor %}
- + User{% if sort=='user' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %} - + - + Platform{% if sort=='platform' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %} - + - + Total Storage (GB){% if sort=='total_size' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %} - + - + Video Count{% if sort=='video_count' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %} - + - + Avg Size per Video (GB){% if sort=='avg_size' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %} - +
{{ user }}{{ platform }}{{ "%.2f"|format(stats.total_size) }}{{ stats.video_count }}{{ "%.2f"|format(avg_sizes[key]) }}
{{ user }}{{ platform }}{{ "%.2f"|format(stats.total_size) }}{{ stats.video_count }}{{ "%.2f"|format(avg_sizes[key]) }}
{% if total_pages > 1 %} - {% endif %} - + + \ No newline at end of file