diff --git a/.gitignore b/.gitignore
index 74a067a..24ae378 100644
--- a/.gitignore
+++ b/.gitignore
@@ -185,3 +185,4 @@ cython_debug/
 .last_checked
 /concated
 /edited
+config_recorder.txt
diff --git a/concat_helper.py b/concat_helper.py
index d53ff94..4871f07 100644
--- a/concat_helper.py
+++ b/concat_helper.py
@@ -69,7 +69,6 @@ def concat_copy(videos, out_path):
         out_path,
     ]
 
-    print("Running FFmpeg concat...")
     result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
     os.unlink(list_file)
 
@@ -115,6 +114,7 @@ def concatenate_videos(videos_list, reencode_concate = False):
 
     if not reencode_concate:
         return False
 
+    print("Falling back to re-encoding due to concat failure.")
     return encode_concatenate_videos(videos_list)
 
 def copy_concatenate_videos(videos_list):
@@ -134,7 +134,6 @@ def copy_concatenate_videos(videos_list):
 
     success = concat_copy(videos_list, output_path)
     if not success:
-        print("Falling back to re-encoding due to concat failure.")
         return False
 
     # Remove originals
@@ -147,7 +146,7 @@ def copy_concatenate_videos(videos_list):
 
     return main_video
 
 def encode_concatenate_videos(videos_list):
-    """Your existing function to encode and concatenate videos."""
+    """Encode and concatenate videos without printing FFmpeg output to the console."""
     main_video = videos_list[0]
     video_path = main_video["filepath"]
@@ -163,17 +162,12 @@ def encode_concatenate_videos(videos_list):
 
     target_width, target_height = get_target_resolution(videos_list)
     target_bitrate_kbps = get_target_bitrate(target_width, target_height)
 
-    # Clamp target bitrate to not exceed source
     if current_bitrate > 0:
         target_bitrate_kbps = min(target_bitrate_kbps, current_bitrate)
-
-        # Max bitrate shouldn't exceed source either
-        if current_bitrate > 0:
         max_bitrate_kbps = min(int(1.5 * target_bitrate_kbps), current_bitrate)
     else:
         max_bitrate_kbps = int(1.5 * target_bitrate_kbps)
-
     fps_float = get_fps(video_path) or video_info.get('fps') or 30.0
     if fps_float <= 0:
         fps_float = 30.0
@@ -184,14 +178,13 @@ def encode_concatenate_videos(videos_list):
     print(f" Target Bitrate: {target_bitrate_kbps}k (max ~{max_bitrate_kbps}k)")
     print(f" Keyframe Interval: {keyframe_interval}")
 
-    cmd = ["ffmpeg", "-y"] # Overwrite output if exists
+    cmd = ["ffmpeg", "-y"]
     for v in videos_list:
         cmd.extend(["-i", v["filepath"]])
 
     filter_statements = []
     concat_streams = []
     n = len(videos_list)
-
     unified_fps = 30
 
     for i in range(n):
@@ -200,10 +193,8 @@ def encode_concatenate_videos(videos_list):
         )
         concat_streams.append(f"[v{i}][{i}:a]")
 
-    # Example final: [v0][0:a][v1][1:a]concat=n=2:v=1:a=1[outv][outa]
     concat_line = "".join(concat_streams) + f"concat=n={n}:v=1:a=1[outv][outa]"
     filter_statements.append(concat_line)
-
     filter_complex = ";".join(filter_statements)
 
     cmd.extend([
@@ -222,13 +213,12 @@ def encode_concatenate_videos(videos_list):
     ])
 
     try:
-        subprocess.run(cmd, check=True)
-    except:
+        subprocess.run(cmd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+    except subprocess.CalledProcessError:
         return False
 
     for video in videos_list:
         os.remove(video["filepath"])
 
     shutil.move(temp_path, output_path)
-
-    return main_video
\ No newline at end of file
+    return main_video
diff --git a/concater.py b/concater.py
index 054a89a..1d83be2 100644
--- a/concater.py
+++ b/concater.py
@@ -22,41 +22,32 @@ def organize_videos():
 
     # process the videos
     video_data = group_videos(videos, sort_by="size", order="asc")
 
+    print("Grouping videos for concatenation...")
     # group all videos for concatation first.
     grouped_videos = []
     for user, videos in video_data.items():
         grouped_videos.extend(group_for_concatenation_simple(videos))
+
     sorted_processed_videos = sorted(grouped_videos, key=sort_type["count"], reverse=True)
 
     # group the videos for concatenation
     for video_list in sorted_processed_videos:
+        print(100*"=")
+        print("\n"*2)
+
         video_id = video_list[0]['video_id']
-
         videos_sum_size = sum([video['size'] for video in video_list])
-        print(100*"=")
-        print("\n"*2)
         print(f"Group {video_id} has {len(video_list)} videos and total size of {videos_sum_size} MB")
-        print("\n"*2)
-        print(100*"=")
-
         main_video = concatenate_videos(video_list, reencode_concate=True)
         if main_video:
             print(f"Processed {len(video_list)} input videos into {main_video["filepath"]} output video.")
             continue
-
-        if MOVE_FUCKED:
-            print(f"Videos are fucked.")
-
-            main_video = video_list[0]
-            video_name = main_video['video_id']
-            fucked_dir = os.path.join("concate_fucked", video_name)
-            os.makedirs(fucked_dir, exist_ok=True)
-            for video in video_list:
-                shutil.move(video['filepath'], os.path.join(fucked_dir, os.path.basename(video['filepath'])))
+        print(f"Failed to process {len(video_list)} input videos into an output video.")
+
 
 if __name__ == "__main__":
     organize_videos()
\ No newline at end of file
diff --git a/funcs.py b/funcs.py
index fbfe367..8c8913a 100644
--- a/funcs.py
+++ b/funcs.py
@@ -399,7 +399,7 @@ def calculate_file_hash(file_path):
         data = f.read()
     return hashlib.sha256(data).hexdigest()
 
-def group_for_concatenation_simple(videos, time_limit=60):
+def group_for_concatenation_simple(videos, time_limit=120):
     """
     Groups videos into lists where:
     - total group size <= 9GB (9216 MB),
@@ -418,11 +418,12 @@ def group_for_concatenation_simple(videos, time_limit=60):
 
         if current_group:
             # Check if adding this video breaks the size limit
             time_difference = (video_start - last_video_end).total_seconds() / 60
-            size_exceeded = (current_size_mb + video['size'] > 9216)
             time_exceeded = (time_difference > time_limit)
+            # size_exceeded = (current_size_mb + video['size'] > 9216)
+
             # If we exceed size, exceed time gap, or mismatch in parameters => start new group
-            if size_exceeded or time_exceeded:
+            if time_exceeded: #or size_exceeded:
                 concatenated_video_groups.append(current_group)
                 current_group = []
                 current_size_mb = 0
diff --git a/helpers/cache.py b/helpers/cache.py
index 91848e8..27d1a68 100644
--- a/helpers/cache.py
+++ b/helpers/cache.py
@@ -20,16 +20,21 @@ def build_cache(start=None, end=None):
         key = f"{username}::{platform}"
 
         total_gb = 0.0
+        last_online = None
         for v in vids:
             try:
                 total_gb += float(v.get("size", 0) or 0) / 1024.0
             except (ValueError, TypeError):
-                # ignore bad rows
-                continue
+                pass
+            # track latest created_at
+            ca = v.get("created_at")
+            if ca and (last_online is None or ca > last_online):
+                last_online = ca
 
         storage_usage[key] = {
             "total_size": total_gb,
-            "video_count": len(vids)
+            "video_count": len(vids),
+            "last_online": last_online,  # <— new
         }
         avg_sizes[key] = (total_gb / len(vids)) if vids else 0.0
         video_map[key] = vids
diff --git a/routes/web.py b/routes/web.py
index cde292a..ba1cf85 100644
--- a/routes/web.py
+++ b/routes/web.py
@@ -64,6 +64,10 @@ def dashboard():
     def k_total(x): return x[1]["total_size"]
     def k_count(x): return x[1]["video_count"]
     def k_avg(x): return cache["avg_sizes"][x[0]]
+    def k_last(x):
+        v = x[1].get("last_online")
+        # put None at the end when descending
+        return v if v is not None else (float("-inf") if not reverse else float("inf"))
 
     key_map = {
         "user": k_user,
@@ -71,11 +75,11 @@ def dashboard():
         "total_size": k_total,
         "video_count": k_count,
         "avg_size": k_avg,
+        "last_online": k_last,  # <— new
     }
     base_key = key_map.get(sort, k_total)
 
     # ---- get recording list → two sets: online + recording_offline ----
-    # _get_recording_streamers() returns: [{"username": "...", "is_online": true/false}, ...]
     online_usernames: set[str] = set()
     recording_offline_usernames: set[str] = set()
 
@@ -101,14 +105,13 @@ def dashboard():
         pass
 
     # ---- sort with optional grouping ----
-    online_items  = [x for x in items if is_online(x)]
-    recording_offline_items= [x for x in items if is_recording_offline(x)]
-    the_rest  = [x for x in items if (x not in online_items) and (x not in recording_offline_items)]
+    online_items = [x for x in items if is_online(x)]
+    recording_offline_items = [x for x in items if is_recording_offline(x)]
+    the_rest = [x for x in items if (x not in online_items) and (x not in recording_offline_items)]
 
     online_items.sort(key=base_key, reverse=reverse)
     recording_offline_items.sort(key=base_key, reverse=reverse)
     the_rest.sort(key=base_key, reverse=reverse)
-
     items = online_items + recording_offline_items + the_rest
 
     # ---- paginate ----
diff --git a/static/styles.css b/static/styles.css
index 1764c4b..31e2dd5 100644
--- a/static/styles.css
+++ b/static/styles.css
@@ -71,7 +71,7 @@ table{
 }
 th,td{border:1px solid var(--line); padding:10px; text-align:left}
 th{
-  background:#222; position:sticky; top:48px; /* align with .topbar height */
+  background:#222; position:sticky;
   z-index:10
 }
 tr:nth-child(even){background:#181818}
diff --git a/templates/main.html b/templates/main.html
index 6ad084a..9798fd3 100644
--- a/templates/main.html
+++ b/templates/main.html
@@ -37,19 +37,14 @@
   {% set next_total_dir = 'asc' if sort != 'total_size' or dir == 'desc' else 'desc' %}
   {% set next_count_dir = 'asc' if sort != 'video_count' or dir == 'desc' else 'desc' %}
   {% set next_avg_dir = 'asc' if sort != 'avg_size' or dir == 'desc' else 'desc' %}
+  {% set next_last_dir = 'asc' if sort != 'last_online' or dir == 'desc' else 'desc' %}
-
-  User{% if sort=='user' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
-
-  Platform{% if sort=='platform' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
-
-  Total Storage (GB){% if sort=='total_size' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
-
-  Video Count{% if sort=='video_count' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
-
-  Avg Size per Video (GB){% if sort=='avg_size' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
-
-  Last Online{% if sort=='last_online' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
+  User{% if sort=='user' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
+  Platform{% if sort=='platform' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
+  Total Storage (GB){% if sort=='total_size' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
+  Video Count{% if sort=='video_count' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
+  Avg Size per Video (GB){% if sort=='avg_size' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
+  Last Online{% if sort=='last_online' %} {{ '▲' if dir=='asc' else '▼' }}{% endif %}
@@ -84,7 +79,7 @@
 
 
-{{ pager('web.dashboard', page, total_pages, q=query, sort=sort, dir=dir, timeframe=timeframe, start=start_date, end=end_str) }}
+{{ pager('web.dashboard', page, total_pages, q=query, sort=sort, dir=dir, timeframe=timeframe, start=start_str, end=end_str) }}
 
 {% endblock %}
 
 {% block scripts %}
diff --git a/video_funcs.py b/video_funcs.py
index 331400e..785986f 100644
--- a/video_funcs.py
+++ b/video_funcs.py
@@ -82,18 +82,56 @@ def get_video_info(filepath):
     except Exception:
         return {"width": 0, "height": 0, "bitrate": 0, "fps": 0.0}
 
-def get_target_resolution(group):
-    """Collect the most common resolution from the group's videos."""
+def get_common_resolution(group):
+    """Most common (w,h) across the group's videos. Fallback 1280x720."""
     resolutions = []
     for v in group:
         info = get_video_info(v["filepath"])
-        w, h = info["width"], info["height"]
+        w, h = info.get("width"), info.get("height")
         if w and h:
             resolutions.append((w, h))
     if not resolutions:
         return (1280, 720)
     return Counter(resolutions).most_common(1)[0][0]
 
+def get_target_resolution(group):
+    """
+    Choose (w,h) whose videos have the highest *total duration*.
+    Tie-breakers: higher count, then larger area. Fallback 1280x720.
+    """
+    totals = {}  # (w,h) -> total duration
+    counts = {}  # (w,h) -> number of files
+
+    for v in group:
+        info = get_video_info(v["filepath"])
+        w, h = info.get("width"), info.get("height")
+        if not (w and h):
+            continue
+
+        # Prefer DB duration if present, else probe info['duration'], else 0
+        dur = v.get("duration", info.get("duration", 0))
+        try:
+            dur = float(dur)
+        except (TypeError, ValueError):
+            dur = 0.0
+
+        key = (w, h)
+        totals[key] = totals.get(key, 0.0) + dur
+        counts[key] = counts.get(key, 0) + 1
+
+    if not totals:
+        return (1280, 720)
+
+    def sort_key(item):
+        (w, h), total = item
+        cnt = counts[(w, h)]
+        area = (w or 0) * (h or 0)
+        return (total, cnt, area)
+
+    best_resolution = max(totals.items(), key=sort_key)[0]
+    return best_resolution
+
+
 def get_target_bitrate(width, height):
     """Your existing function to choose a bitrate based on resolution."""
     resolutions = {(854, 480): 1000,(1280, 720): 1500,(1920, 1080): 3000,(2560, 1440): 5000,(3840, 2160): 12000}