@@ -1,131 +1,178 @@
-from flask import Flask, render_template, request, redirect, url_for
-from funcs import process_videos, group_videos, match_data_to_video_fast, get_all_videos, get_all_data
+# app.py – Optimised AF
+from flask import Flask, render_template, request, redirect, url_for, jsonify
+from funcs import (
+    process_videos, group_videos, match_data_to_video_fast,
+    get_all_videos, get_all_data
+)
 from config import connect_redis
-import json, os, time, math, subprocess
-from tqdm import tqdm
+from concurrent.futures import ThreadPoolExecutor
+import hashlib, json, math, os, subprocess, time, zlib

-# -------------------- CONFIG -------------------- #
+# ───────── CONFIG ───────── #
 app = Flask(__name__)
 redis = connect_redis()

-CACHE_KEY = "video_cache"
+CACHE_KEY = "video_cache_v2"  # bump key so we don’t fight old data
+META_HASH = "video_meta_v2"   # per-file meta cache
 THUMB_DIR = "static/thumbnails"
 VIDEOS_PER_PAGE = 20
+THUMB_WIDTH = 320  # px
+FF_QUALITY = "80"  # 0-100 for WebP

 SCAN_DIRS = [
-    "E:/streamaster/downloaded/",
-    "U:/encoded",
-    "U:/count_sorted"
+    r"E:/streamaster/downloaded/",
+    r"U:/encoded",
+    r"U:/count_sorted"
 ]

 DATA_DIRS = [
-    "E:/streamaster/data",
-    "E:/streamaster/downloaded",
+    r"E:/streamaster/data",
+    r"E:/streamaster/downloaded",
 ]

 os.makedirs(THUMB_DIR, exist_ok=True)

-# -------------------- UTILS -------------------- #
-def generate_thumbnail(video_path, thumb_path):
-    if os.path.exists(thumb_path):
-        return
-    cmd = [
-        "ffmpeg", "-y", "-i", video_path, "-ss", "00:00:05.000",
-        "-vframes", "1", thumb_path
-    ]
-    subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+# ───────── UTILS ───────── #
+def _hashed_thumb_path(video_id: str) -> str:
+    """
+    static/thumbnails/ab/cd/<video_id>.webp
+    keeps any one subdir under ~256 entries.
+    """
+    h = hashlib.md5(video_id.encode()).hexdigest()
+    sub1, sub2 = h[:2], h[2:4]
+    path = os.path.join(THUMB_DIR, sub1, sub2)
+    os.makedirs(path, exist_ok=True)
+    return os.path.join(path, f"{video_id}.webp")
+def _gen_thumb_cmd(src: str, dest: str):
+    return [
+        "ffmpeg", "-y", "-loglevel", "error",
+        "-ss", "0", "-i", src,
+        "-vframes", "1",
+        "-vf", f"thumbnail,scale={THUMB_WIDTH}:-1",
+        "-q:v", FF_QUALITY,
+        dest
+    ]
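+
+
+# Thread-pool worker: takes a single (src, dest) tuple so it can be fed
+# straight to exe.map() in build_cache.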
+def generate_thumbnail(task):
+    """Run in threadpool. task = (video_path, dest_path)"""
+    src, dest = task
+    if os.path.exists(dest):
+        return
+    subprocess.run(_gen_thumb_cmd(src, dest),
+                   stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)


-def load_video_data():
-    videos = []
-    for d in SCAN_DIRS:
-        videos += get_all_videos(d)
-
-    data = []
-    for d in DATA_DIRS:
-        data += get_all_data(d)
-
-    parsed_videos, unmatched = match_data_to_video_fast(videos, data)
-    parsed_videos = process_videos(parsed_videos)
-    video_data = group_videos(parsed_videos, sort_by="count", order="desc")
-    return video_data
+def load_video_lists():
+    videos, data = [], []
+    for d in SCAN_DIRS:
+        videos += get_all_videos(d)
+    for d in DATA_DIRS:
+        data += get_all_data(d)
+    parsed, _ = match_data_to_video_fast(videos, data)
+    return process_videos(parsed)


-def compute_analytics(video_data):
-    storage_usage = {}
-    avg_sizes = {}
-    video_map = {}
-
-    for (username, platform), vids in video_data.items():
-        total_size_gb = sum(v['size'] for v in vids) / 1024
-        avg_size_gb = (total_size_gb / len(vids)) if vids else 0
-        key = f"{username}::{platform}"
-        storage_usage[key] = {
-            "total_size": total_size_gb,
-            "video_count": len(vids)
-        }
-        avg_sizes[key] = avg_size_gb
-
-        with tqdm(vids, desc=f"Generating thumbnails for {username} ({platform})") as pbar:
-            for v in vids:
-                pbar.update(1)
-                video_id = os.path.basename(v['filepath']).split('.')[0]
-                thumb_path = os.path.join(THUMB_DIR, f"{video_id}.jpg")
-                generate_thumbnail(v['filepath'], thumb_path)
-                v['thumbnail'] = thumb_path
-
-        video_map[key] = vids
-
-    return storage_usage, avg_sizes, video_map
-
-
-def refresh_data():
-    video_data = load_video_data()
-    storage_usage, avg_sizes, video_map = compute_analytics(video_data)
+def build_cache():
+    parsed_videos = load_video_lists()
+    grouped = group_videos(parsed_videos, sort_by="count", order="desc")
+
+    storage_usage = {}
+    avg_sizes = {}
+    video_map = {}
+
+    # Threaded thumb generation queue
+    thumb_tasks = []
+
+    for (username, platform), vids in grouped.items():
+        key = f"{username}::{platform}"
+        total_gb = sum(v["size"] for v in vids) / 1024
+        storage_usage[key] = {"total_size": total_gb, "video_count": len(vids)}
+        avg_sizes[key] = total_gb / len(vids) if vids else 0
+
+        for v in vids:
+            video_id = os.path.basename(v["filepath"]).rsplit(".", 1)[0]
+            thumb_path = _hashed_thumb_path(video_id)
+
+            # Meta-cache (skip thumb regen if unchanged)
+            mtime = os.path.getmtime(v["filepath"])
+            meta = redis.hget(META_HASH, v["filepath"])
+            if not meta or json.loads(meta)["mtime"] != mtime:
+                thumb_tasks.append((v["filepath"], thumb_path))
+                redis.hset(META_HASH, v["filepath"],
+                           json.dumps({"mtime": mtime, "thumb": thumb_path}))
+            v["thumbnail"] = thumb_path
+
+        video_map[key] = vids
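+
+    # ffmpeg runs in subprocesses, so the GIL is not the bottleneck here; 2x
+    # cpu_count oversubscribes on purpose because workers mostly wait on I/O.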
+    # Smash thumbnails in parallel
+    if thumb_tasks:
+        with ThreadPoolExecutor(max_workers=(os.cpu_count() or 4) * 2) as exe:
+            list(exe.map(generate_thumbnail, thumb_tasks))
+
     cache = {
         "timestamp": time.time(),
         "videos": video_map,
         "storage_usage": storage_usage,
         "avg_sizes": avg_sizes
     }
-    redis.set(CACHE_KEY, json.dumps(cache))
+    # Compress JSON → binary before Redis; build the blob once and reuse it
+    blob = zlib.compress(json.dumps(cache).encode())
+    redis.set(CACHE_KEY, blob)
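+    # NB: despite the .gz name this is a raw zlib stream; read it back with
+    # zlib.decompress (as get_cached_data does), not the gzip module.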
+    # also drop to disk in case Redis is wiped
+    with open("video_cache.json.gz", "wb") as f:
+        f.write(blob)
     return cache


 def get_cached_data():
-    try:
-        cached = redis.get(CACHE_KEY)
-        return json.loads(cached)  # ✅ Use cache if it exists
-    except Exception as e:
-        return refresh_data()  # ✅ Generate and store fresh data if empty
+    # try Redis first
+    blob = redis.get(CACHE_KEY)
+    if blob:
+        return json.loads(zlib.decompress(blob).decode())
+    # fallback to disk
+    if os.path.exists("video_cache.json.gz"):
+        with open("video_cache.json.gz", "rb") as f:
+            return json.loads(zlib.decompress(f.read()).decode())
+    # last resort: full rebuild
+    return build_cache()


-# -------------------- ROUTES -------------------- #
+# ───────── ROUTES ───────── #
 @app.route("/")
 def dashboard():
     cache = get_cached_data()
-    sorted_usage = sorted(cache["storage_usage"].items(), key=lambda x: x[1]["total_size"], reverse=True)
-    return render_template("analytics.html", storage_usage=sorted_usage, avg_sizes=cache["avg_sizes"])
+    sorted_usage = sorted(
+        cache["storage_usage"].items(),
+        key=lambda x: x[1]["total_size"],
+        reverse=True
+    )
+    return render_template(
+        "analytics.html",
+        storage_usage=sorted_usage,
+        avg_sizes=cache["avg_sizes"]
+    )


 @app.route("/refresh")
 def refresh():
-    refresh_data()
-    return redirect(url_for("dashboard"))
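+    # rebuild synchronously and report stats as JSON instead of redirecting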
+    cache = build_cache()
+    return jsonify({
+        "status": "ok",
+        "videos": sum(x["video_count"] for x in cache["storage_usage"].values()),
+        "updated": time.ctime(cache["timestamp"])
+    })


 @app.route("/user/<username>")
 def user_page(username):
     cache = get_cached_data()
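+    # dict union (Python 3.9+) builds a fresh dict per video, so the cached
+    # entries are no longer mutated in place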
-    videos = []
-    for key, vid_list in cache["videos"].items():
-        user, platform = key.split("::")
-        if user == username:
-            for v in vid_list:
-                v['platform'] = platform
-            videos.extend(vid_list)
-
-    page = int(request.args.get("page", 1))
-    total_pages = math.ceil(len(videos) / VIDEOS_PER_PAGE)
-    start = (page - 1) * VIDEOS_PER_PAGE
-    paginated = videos[start:start + VIDEOS_PER_PAGE]
-
-    return render_template("user_page.html",
-                           username=username,
-                           videos=paginated,
-                           page=page,
-                           total_pages=total_pages)
+    videos = [v | {"platform": key.split("::")[1]}
+              for key, vids in cache["videos"].items()
+              if key.split("::")[0] == username
+              for v in vids]
+
+    page = max(1, int(request.args.get("page", 1)))
+    total_pages = max(1, math.ceil(len(videos) / VIDEOS_PER_PAGE))
+    start = (page - 1) * VIDEOS_PER_PAGE
+    return render_template(
+        "user_page.html",
+        username=username,
+        videos=videos[start:start + VIDEOS_PER_PAGE],
+        page=page, total_pages=total_pages
+    )


 if __name__ == "__main__":
     app.run(debug=True)