from flask import Blueprint, render_template, request, send_file, jsonify
import math
from helpers.db import db_get_videos, db_get_video, db_get_recent
from helpers.favorites import mark_favorites, db_get_favorites, db_get_fav_set, db_get_favorite_users
from helpers.cache import build_cache
from config import VIDEOS_PER_PAGE, DASHBOARD_PER_PAGE, get_local_db_connection
from datetime import date, datetime, timedelta
import requests

web = Blueprint("web", __name__)


def _get_recording_streamers() -> list[dict]:
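    """Fetch the list of streamers the recorder is currently tracking.

    Calls the local recorder API and returns its parsed JSON payload, which the
    callers below treat as a list of dicts with "username" and "is_online"
    keys. Returns an empty list if the API is unreachable or returns an error.
    """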
    try:
        # A short timeout keeps a stalled recorder API from hanging page renders.
        resp = requests.get("http://127.0.0.1:5000/api/get_recording/", timeout=5)
        resp.raise_for_status()
        return resp.json()
    except Exception:
        return []


def _parse_dates(timeframe: str, start_str: str | None, end_str: str | None):
    """Return (start, end) as date objects, or (None, None) if no range applies.

    The end date is inclusive by day: callers filtering timestamp columns
    should compare with ``< end + 1 day`` (or ``<= end`` for date columns).
    """
    today = date.today()
    if timeframe == "week":
        return (today - timedelta(days=6)), today
    if timeframe == "month":
        return (today - timedelta(days=29)), today
    if timeframe == "year":
        return (today - timedelta(days=364)), today
    if timeframe == "custom":
        try:
            s = datetime.strptime(start_str, "%Y-%m-%d").date() if start_str else None
            e = datetime.strptime(end_str, "%Y-%m-%d").date() if end_str else None
            if s and e and s > e:
                s, e = e, s
            return s, e
        except ValueError:
            return None, None
    return None, None


@web.route("/")
def dashboard():
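    """Dashboard: per-(user, platform) storage summary with search, sorting,
    timeframe filtering, online-first grouping, and pagination."""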
    # ---- read filters ----
    query = request.args.get("q", "").lower().strip()
    sort = request.args.get("sort", "total_size")
    dir_ = request.args.get("dir", "desc")
    reverse = (dir_ == "desc")

    timeframe = request.args.get("timeframe", "all")
    start_str = request.args.get("start")
    end_str = request.args.get("end")

    start, end = _parse_dates(timeframe, start_str, end_str)

    # ---- build cache over timeframe ----
    cache = build_cache(start=start, end=end)
    items = list(cache["storage_usage"].items())

    # ---- search ----
    if query:
        items = [e for e in items if query in e[0].split("::")[0].lower()]

    # ---- sort keys ----
    def k_user(x): return x[0].split("::")[0].lower()
    def k_platform(x): return x[0].split("::")[1].lower()
    def k_total(x): return x[1]["total_size"]
    def k_count(x): return x[1]["video_count"]
    def k_avg(x): return cache["avg_sizes"][x[0]]

    def k_last(x):
        v = x[1].get("last_online")
        # Entries without last_online sort to the end in either direction:
        # +inf when ascending, -inf when descending.
        return v if v is not None else (float("inf") if not reverse else float("-inf"))

    key_map = {
        "user": k_user,
        "platform": k_platform,
        "total_size": k_total,
        "video_count": k_count,
        "avg_size": k_avg,
        "last_online": k_last,
    }
    base_key = key_map.get(sort, k_total)

    # ---- get recording list → two sets: online + recording_offline ----
    online_usernames: set[str] = set()
    recording_offline_usernames: set[str] = set()

    def user_of(item) -> str:
        return item[0].split("::")[0]

    def is_online(item) -> bool:
        return user_of(item).lower() in online_usernames

    def is_recording_offline(item) -> bool:
        u = user_of(item).lower()
        return (u in recording_offline_usernames) and (u not in online_usernames)

    try:
        rec_list = _get_recording_streamers()
        for s in rec_list or []:
            u = (s.get("username") or "").lower()
            if not u:
                continue
            if s.get("is_online"):
                online_usernames.add(u)
            else:
                recording_offline_usernames.add(u)
    except Exception:
        pass
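    # A failed status lookup leaves both sets empty, so the page falls back to
    # ungrouped ordering instead of failing.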

    # ---- sort with optional grouping ----
    online_items = [x for x in items if is_online(x)]
    recording_offline_items = [x for x in items if is_recording_offline(x)]
    the_rest = [x for x in items if not is_online(x) and not is_recording_offline(x)]

    online_items.sort(key=base_key, reverse=reverse)
    recording_offline_items.sort(key=base_key, reverse=reverse)
    the_rest.sort(key=base_key, reverse=reverse)
    items = online_items + recording_offline_items + the_rest

    # ---- paginate ----
    page = max(1, int(request.args.get("page", 1)))
    total_pages = max(1, math.ceil(len(items) / DASHBOARD_PER_PAGE))
    start_idx = (page - 1) * DASHBOARD_PER_PAGE
    paginated = items[start_idx:start_idx + DASHBOARD_PER_PAGE]

    favorite_users = db_get_favorite_users()

    return render_template(
        "main.html",
        storage_usage=paginated,
        avg_sizes=cache["avg_sizes"],
        page=page,
        total_pages=total_pages,
        query=query,
        sort=sort,
        dir=dir_,
        timeframe=timeframe,
        start_date=start_str,
        end_date=end_str,
        online_set=online_usernames,
        recording_offline_set=recording_offline_usernames,
        favorite_users=favorite_users,
    )


@web.route("/user/<username>")
def user_page(username):
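    """Paginated list of a single user's videos, with favorites marked."""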
    videos = db_get_videos(username=username)
    page = max(1, int(request.args.get("page", 1)))
    total_pages = max(1, math.ceil(len(videos) / VIDEOS_PER_PAGE))
    start = (page - 1) * VIDEOS_PER_PAGE
    paginated = videos[start:start + VIDEOS_PER_PAGE]
    mark_favorites(paginated)

    return render_template(
        "user_page.html",
        username=username,
        videos=paginated,
        page=page,
        total_pages=total_pages
    )


@web.route("/video/stream/<video_id>")
def stream_video(video_id):
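    """Serve a video file from disk; the mimetype is hard-coded to video/mp4,
    so recordings are assumed to be MP4."""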
    video = db_get_video(video_id)
    if video:
        return send_file(video["filepath"], mimetype="video/mp4")
    return "Video not found", 404


@web.route("/video/<video_id>")
def view_video(video_id):
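    """Render the detail page for a single video, flagging whether it is a favorite."""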
    video = db_get_video(video_id)
    if video:
        video["is_favorite"] = (video["video_id"] in db_get_fav_set())
        return render_template("video_view.html", video=video)
    return "Video not found", 404


@web.route("/recent")
def recent():
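    """Paginated list of the most recently added videos."""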
    page = max(1, int(request.args.get("page", 1)))
    videos, total = db_get_recent(page, VIDEOS_PER_PAGE)
    total_pages = max(1, math.ceil(total / VIDEOS_PER_PAGE))
    mark_favorites(videos)

    return render_template(
        "recent.html",
        videos=videos,
        page=page,
        total_pages=total_pages
    )


@web.route("/favorites")
def favorites_page():
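    """Paginated list of videos marked as favorites."""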
    page = max(1, int(request.args.get("page", 1)))
    videos, total = db_get_favorites(page, VIDEOS_PER_PAGE)
    total_pages = max(1, math.ceil(total / VIDEOS_PER_PAGE))
    mark_favorites(videos)

    return render_template(
        "favorites.html",
        videos=videos,
        page=page,
        total_pages=total_pages
    )


@web.route("/users")
def users():
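    """User directory: one card per (username, site) with sizes aggregated to GB,
    recent thumbnails, and optional online-first ordering. Aggregation and
    pagination happen in the database."""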
    # ---- filters ----
    q = (request.args.get("q") or "").lower().strip()
    sort = request.args.get("sort", "total_size")  # user|site|total_size|video_count
    dir_ = request.args.get("dir", "desc")
    reverse = (dir_ == "desc")
    timeframe = request.args.get("timeframe", "all")
    start_str = request.args.get("start")
    end_str = request.args.get("end")
    show_online_first = request.args.get("online") == "1"

    start, end = _parse_dates(timeframe, start_str, end_str)

    # ---- WHERE ----
    where = ["1=1"]
    params = {}
    if q:
        where.append("LOWER(username) LIKE %(q)s")
        params["q"] = f"%{q}%"
    if start:
        where.append("created_at >= %(start)s")
        params["start"] = start
    if end:
        # _parse_dates returns an inclusive end date, so filter strictly below
        # the following midnight.
        where.append("created_at < %(end)s")
        params["end"] = end + timedelta(days=1)
    where_sql = " AND ".join(where)

    # ---- ORDER BY (use computed GB alias) ----
    sort_map = {
        "user": "username",
        "site": "site",
        "total_size": "total_gb",  # sort by GB, not the raw MB sum
        "video_count": "video_count",
    }
    order_col = sort_map.get(sort, "total_gb")
    order_dir = "DESC" if reverse else "ASC"
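    # order_col comes from the fixed sort_map above and order_dir is one of two
    # literals, so interpolating them into the SQL below cannot inject user input.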

    # ---- pagination ----
    page = max(1, int(request.args.get("page", 1)))
    per_page = 100
    offset = (page - 1) * per_page

    # ---- count distinct (username, site) for pager ----
    count_sql = f"""
        SELECT COUNT(*) AS n FROM (
            SELECT 1
            FROM videos
            WHERE {where_sql}
            GROUP BY username, site
        ) t;
    """

    # `size` is stored in MB; dividing by 1024.0 gives the "GB" shown in the UI
    # (strictly GiB if the stored values are MiB; use 1000.0 for decimal GB).
    agg_sql = f"""
        SELECT
            username,
            site,
            COUNT(*) AS video_count,
            SUM(size) AS total_mb,
            AVG(size) AS avg_mb,
            (SUM(size)::numeric / 1024.0) AS total_gb,
            (AVG(size)::numeric / 1024.0) AS avg_gb
        FROM videos
        WHERE {where_sql}
        GROUP BY username, site
        ORDER BY {order_col} {order_dir}
        LIMIT %(limit)s OFFSET %(offset)s;
    """
    params.update({"limit": per_page, "offset": offset})
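    # Both statements are executed with the same `params` dict; the count query
    # simply never references the limit/offset keys, which psycopg2-style named
    # placeholders tolerate.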

    # get_local_db_connection() already returns a cursor alongside the connection.
    conn, cur = get_local_db_connection()

    cur.execute(count_sql, params)
    total_rows = cur.fetchone()[0] or 0
    total_pages = max(1, math.ceil(total_rows / per_page))

    cur.execute(agg_sql, params)
    rows = cur.fetchall()
    # rows: (username, site, video_count, total_mb, avg_mb, total_gb, avg_gb)

    # ---- online/recording status sets (optional) ----
    online_usernames: set[str] = set()
    recording_offline_usernames: set[str] = set()
    if show_online_first:
        try:
            rec_list = _get_recording_streamers()
            for s in rec_list or []:
                u = (s.get("username") or "").lower()
                if not u:
                    continue
                if s.get("is_online"):
                    online_usernames.add(u)
                else:
                    recording_offline_usernames.add(u)
        except Exception:
            pass

    # ---- thumbnail candidates per (user, site) ----
    tcur = conn.cursor()
    thumb_sql = """
        SELECT thumbnail
        FROM videos
        WHERE username = %(u)s
          AND site = %(s)s
          AND thumbnail IS NOT NULL
          AND thumbnail <> ''
        ORDER BY created_at DESC
        LIMIT 3;
    """

    cards = []
    for (username, site, video_count, total_mb, avg_mb, total_gb, avg_gb) in rows:
        # fetch up to 3 recent thumbnails
        thumb_urls = []
        try:
            tcur.execute(thumb_sql, {"u": username, "s": site})
            thumb_urls = [r[0] for r in tcur.fetchall() if r and r[0]]
        except Exception:
            pass

        total_gb_val = float(total_gb or 0.0)
        avg_gb_val = float(avg_gb or 0.0)

        uname_low = (username or "").lower()
        cards.append({
            "user": username,
            "site": site,
            "video_count": int(video_count),

            # numeric
            "total_size": total_gb_val,
            "avg_size": avg_gb_val,

            # preformatted strings for display
            "total_size_display": f"{total_gb_val:.2f}",
            "avg_size_display": f"{avg_gb_val:.2f}",

            "thumb_urls": thumb_urls,
            "is_online": uname_low in online_usernames,
            "is_recording_offline": (uname_low in recording_offline_usernames) and (uname_low not in online_usernames),
        })

    if show_online_first:
        online_cards = [c for c in cards if c["is_online"]]
        rec_off_cards = [c for c in cards if c["is_recording_offline"]]
        the_rest = [c for c in cards if not c["is_online"] and not c["is_recording_offline"]]

        key_map = {
            "user": lambda c: c["user"].lower(),
            "site": lambda c: c["site"].lower(),
            "total_size": lambda c: c["total_size"],
            "video_count": lambda c: c["video_count"],
        }
        k = key_map.get(sort, key_map["total_size"])
        online_cards.sort(key=k, reverse=reverse)
        rec_off_cards.sort(key=k, reverse=reverse)
        the_rest.sort(key=k, reverse=reverse)
        cards = online_cards + rec_off_cards + the_rest

    return render_template(
        "users.html",
        cards=cards,
        page=page,
        total_pages=total_pages,
        query=q,
        sort=sort,
        dir=dir_,
        timeframe=timeframe,
        start_date=start_str,
        end_date=end_str,
        online="1" if show_online_first else "0",
    )