Implement video transcoding, add dashboard UI, and cleanup repository
This commit is contained in:
parent
94f077944b
commit
2f8ec83cd8
7 changed files with 1060 additions and 115 deletions
3
.gitignore
vendored
3
.gitignore
vendored
|
|
@ -1,3 +1,6 @@
|
|||
__pycache__/
|
||||
*.pyc
|
||||
.env
|
||||
*.tar
|
||||
*.gz
|
||||
ta-organizerr.tar.gz
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
FROM python:3.11-slim
|
||||
WORKDIR /app
|
||||
COPY ta_symlink.py .
|
||||
COPY . .
|
||||
RUN apt-get update && apt-get install -y ffmpeg && rm -rf /var/lib/apt/lists/*
|
||||
RUN pip install --no-cache-dir requests flask
|
||||
RUN mkdir -p /app/data
|
||||
EXPOSE 5000
|
||||
CMD ["python", "ta_symlink.py"]
|
||||
|
|
|
|||
|
|
@ -3,8 +3,12 @@ services:
|
|||
build: /mnt/user/appdata/dockerbuildings
|
||||
container_name: ta-organizer
|
||||
volumes:
|
||||
- /mnt/user/appdata/dockerbuildings/source:/app/source:ro
|
||||
- /mnt/user/appdata/dockerbuildings/source:/app/source
|
||||
- /mnt/user/appdata/dockerbuildings/target:/app/target
|
||||
- /mnt/user/appdata/dockerbuildings/data:/app/data
|
||||
ports:
|
||||
- "8002:5000"
|
||||
environment:
|
||||
- SCAN_INTERVAL=${SCAN_INTERVAL:-60}
|
||||
- SCAN_INTERVAL=60
|
||||
- ALLOWED_IPS=127.0.0.1,192.168.1.0/24,10.0.0.0/8,172.16.0.0/12
|
||||
env_file: /mnt/user/appdata/dockerbuildings/.env
|
||||
|
|
|
|||
Binary file not shown.
642
ta_symlink.py
642
ta_symlink.py
|
|
@ -1,4 +1,3 @@
|
|||
|
||||
from pathlib import Path
|
||||
import os
|
||||
import requests
|
||||
|
|
@ -6,29 +5,266 @@ import re
|
|||
import sys
|
||||
import threading
|
||||
import time
|
||||
from flask import Flask, jsonify, render_template_string, request
|
||||
import ipaddress
|
||||
from flask import Flask, jsonify, render_template, request, abort
|
||||
|
||||
# Load config from environment variables
|
||||
API_URL = os.getenv("API_URL", "http://localhost:8457/api")
|
||||
VIDEO_URL = os.getenv("VIDEO_URL", "http://localhost:8457/video/")
|
||||
API_TOKEN = os.getenv("API_TOKEN", "")
|
||||
SCAN_INTERVAL = int(os.getenv("SCAN_INTERVAL", 60)) # Default 60 minutes
|
||||
ALLOWED_IPS = [ip.strip() for ip in os.getenv("ALLOWED_IPS", "127.0.0.1").split(",")]
|
||||
SOURCE_DIR = Path("/app/source")
|
||||
TARGET_DIR = Path("/app/target")
|
||||
HEADERS = {"Authorization": f"Token {API_TOKEN}"}
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
# Database setup
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
|
||||
DB_PATH = Path("/app/data/videos.db")
|
||||
DB_PATH.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
@contextmanager
def get_db():
    """Context manager yielding a sqlite3 connection to DB_PATH.

    The connection is always closed on exit; committing is the caller's
    responsibility.
    """
    conn = sqlite3.connect(DB_PATH)
    # Row factory lets callers access columns by name (row["title"]).
    conn.row_factory = sqlite3.Row
    try:
        yield conn
    finally:
        conn.close()
|
||||
|
||||
def init_db():
    """Create the `videos` table if it does not already exist."""
    with get_db() as conn:
        conn.execute("""
            CREATE TABLE IF NOT EXISTS videos (
                video_id TEXT PRIMARY KEY,
                title TEXT,
                channel TEXT,
                published TEXT,
                symlink TEXT,
                status TEXT,
                last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)
        conn.commit()

# Initialize the schema at import time so every route can assume the table exists.
init_db()
|
||||
|
||||
# Global State
|
||||
processed_videos = []
|
||||
log_buffer = []
|
||||
log_lock = threading.Lock()
|
||||
transcode_log_buffer = []
|
||||
transcode_log_lock = threading.Lock()
|
||||
|
||||
# Utility functions
|
||||
def log(msg):
    """Write *msg* to stdout and append it to the bounded in-memory buffer."""
    print(msg, flush=True)
    with log_lock:
        log_buffer.append(msg)
        # Cap the buffer at 1000 entries, discarding the oldest first.
        if len(log_buffer) > 1000:
            del log_buffer[0]
|
||||
|
||||
def tlog(msg):
    """Record *msg* in the transcode log buffer and echo it to stdout."""
    print(f"[TRANSCODE] {msg}", flush=True)
    with transcode_log_lock:
        transcode_log_buffer.append(msg)
        # Cap the buffer at 500 entries, discarding the oldest first.
        if len(transcode_log_buffer) > 500:
            del transcode_log_buffer[0]
|
||||
|
||||
def detect_encoder():
    """Detect the best available H.264 encoder.

    Queries `ffmpeg -encoders` and picks a hardware encoder in preference
    order (NVENC, VAAPI, VideoToolbox). Falls back to software libx264
    when no hardware encoder is listed or ffmpeg cannot be queried.

    Returns:
        str: an ffmpeg encoder name usable as a `-c:v` value.
    """
    import subprocess
    try:
        result = subprocess.run(['ffmpeg', '-hide_banner', '-encoders'],
                                capture_output=True, text=True)
        encoders = result.stdout

        # Preference order: NVIDIA, then VAAPI, then Apple VideoToolbox.
        for hw in ('h264_nvenc', 'h264_vaapi', 'h264_videotoolbox'):
            if hw in encoders:
                return hw
        return 'libx264'
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any other failure means "no ffmpeg info" -> CPU.
        return 'libx264'
|
||||
|
||||
def probe_codecs(filepath):
    """Return (video_codec, audio_codec) for *filepath* via ffprobe.

    Returns (None, None) when probing fails for any reason.
    """
    import subprocess

    def _codec(stream_selector):
        # One ffprobe call per stream; `csv=p=0` prints just the codec name.
        proc = subprocess.run([
            'ffprobe', '-v', 'error', '-select_streams', stream_selector,
            '-show_entries', 'stream=codec_name', '-of', 'csv=p=0', filepath
        ], capture_output=True, text=True)
        return proc.stdout.strip()

    try:
        # Probe the first video stream, then the first audio stream.
        return _codec('v:0'), _codec('a:0')
    except Exception as e:
        tlog(f"Error probing {filepath}: {e}")
        return None, None
|
||||
|
||||
def transcode_video(filepath, encoder='libx264'):
    """Transcode a video file in place to H.264 video / AAC audio.

    Symlinks are resolved first (a broken symlink still "exists" as a link),
    with host paths under /mnt/user/tubearchives/bp translated to the
    container's /app/source mount. Files already in H.264/AAC are skipped.
    On a GPU-encoder failure the transcode is retried once with libx264.

    Args:
        filepath: path (possibly a symlink) to the video file.
        encoder: ffmpeg video encoder name, e.g. from detect_encoder().

    Returns:
        bool: True on success (or when no work was needed), False otherwise.
    """
    import subprocess

    original_path = Path(filepath)

    # Try to resolve symlink first (don't check exists() first: broken
    # symlinks still exist as links).
    if original_path.is_symlink():
        try:
            # NOTE(review): os.readlink may return a relative target — assumes
            # links are absolute; confirm against how symlinks are created.
            actual_file = Path(os.readlink(original_path)).resolve()
            tlog(f"Following symlink: {filepath} -> {actual_file}")

            # Translate host path to container path:
            # Host /mnt/user/tubearchives/bp/... -> container /app/source/...
            actual_file_str = str(actual_file)
            if actual_file_str.startswith("/mnt/user/tubearchives/bp"):
                container_path = actual_file_str.replace("/mnt/user/tubearchives/bp", "/app/source", 1)
                tlog(f"Translated path: {actual_file} -> {container_path}")
                filepath = container_path
            else:
                filepath = str(actual_file)
        except Exception as e:
            tlog(f"Error resolving symlink: {e}")
            return False
    elif not original_path.exists():
        tlog(f"File not found: {filepath}")
        return False

    # Now check that the (possibly translated) actual file exists.
    if not Path(filepath).exists():
        tlog(f"Source file not found: {filepath}")
        return False

    video_codec, audio_codec = probe_codecs(filepath)

    if video_codec == 'h264' and audio_codec == 'aac':
        tlog(f"Already H.264/AAC: {filepath}")
        return True

    temp_file = f"{filepath}.temp.mp4"

    def _cpu_cmd():
        # Software command: stream-copy video when it is already H.264
        # (audio-only transcode), otherwise re-encode with libx264.
        if video_codec == 'h264':
            return [
                'ffmpeg', '-v', 'error', '-stats', '-i', filepath,
                '-c:v', 'copy',
                '-c:a', 'aac', '-b:a', '192k',
                '-movflags', '+faststart',
                '-y', temp_file
            ]
        return [
            'ffmpeg', '-v', 'error', '-stats', '-i', filepath,
            '-c:v', 'libx264', '-crf', '23', '-preset', 'medium',
            '-c:a', 'aac', '-b:a', '192k',
            '-movflags', '+faststart',
            '-y', temp_file
        ]

    def _replace_original():
        # Swap the transcoded temp file in for the original.
        Path(filepath).unlink()
        Path(temp_file).rename(filepath)

    def _cleanup_temp():
        # Best-effort removal of a partial temp file after a failure.
        if Path(temp_file).exists():
            Path(temp_file).unlink()

    try:
        # Determine transcode strategy.
        if video_codec == 'h264':
            tlog(f"Audio-only transcode: {filepath}")
            cmd = _cpu_cmd()
        else:
            tlog(f"Full transcode using {encoder}: {filepath}")
            if encoder == 'h264_nvenc':
                cmd = [
                    'ffmpeg', '-v', 'error', '-stats', '-i', filepath,
                    '-c:v', 'h264_nvenc', '-preset', 'fast', '-cq', '23',
                    '-c:a', 'aac', '-b:a', '192k',
                    '-movflags', '+faststart',
                    '-y', temp_file
                ]
            elif encoder == 'h264_vaapi':
                cmd = [
                    'ffmpeg', '-v', 'error', '-stats',
                    '-hwaccel', 'vaapi', '-hwaccel_output_format', 'vaapi',
                    '-i', filepath,
                    '-vf', 'format=nv12,hwupload',
                    '-c:v', 'h264_vaapi', '-b:v', '5M',
                    '-c:a', 'aac', '-b:a', '192k',
                    '-movflags', '+faststart',
                    '-y', temp_file
                ]
            else:  # libx264
                cmd = _cpu_cmd()

        result = subprocess.run(cmd, capture_output=True, text=True)

        if result.returncode == 0:
            _replace_original()
            tlog(f"✅ Success: {filepath}")
            return True

        # BUG FIX: the original condition was
        #   encoder in [...] and 'libcuda' in stderr or 'Cannot load' in stderr
        # and, because `and` binds tighter than `or`, it triggered the CPU
        # retry for ANY encoder (including libx264 itself) whenever stderr
        # contained 'Cannot load'. Parenthesize so the retry applies only to
        # hardware encoders that failed with a GPU-load error.
        gpu_encoders = ('h264_nvenc', 'h264_vaapi', 'h264_videotoolbox')
        if encoder in gpu_encoders and ('libcuda' in result.stderr or 'Cannot load' in result.stderr):
            tlog(f"⚠️ GPU encoding failed, retrying with CPU (libx264)...")

            cpu_result = subprocess.run(_cpu_cmd(), capture_output=True, text=True)

            if cpu_result.returncode == 0:
                _replace_original()
                tlog(f"✅ Success (CPU): {filepath}")
                return True
            tlog(f"❌ Failed (CPU): {filepath}")
            tlog(f"Error: {cpu_result.stderr}")
            _cleanup_temp()
            return False

        tlog(f"❌ Failed: {filepath}")
        tlog(f"Error: {result.stderr}")
        _cleanup_temp()
        return False

    except Exception as e:
        tlog(f"❌ Exception: {e}")
        _cleanup_temp()
        return False
|
||||
|
||||
def sanitize(text):
    """Make *text* safe for use as a file/folder name.

    Drops non-ASCII characters, replaces filesystem-unsafe characters
    with underscores, and trims surrounding whitespace.
    """
    ascii_only = text.encode("ascii", "ignore").decode()
    safe = re.sub(r'[\/:*?"<>|]', "_", ascii_only)
    return safe.strip()
|
||||
|
||||
def fetch_all_metadata():
|
||||
print("📥 Fetching all video metadata...", flush=True)
|
||||
log("📥 Fetching all video metadata...")
|
||||
video_map = {}
|
||||
page = 1
|
||||
while True:
|
||||
|
|
@ -67,20 +303,16 @@ def fetch_all_metadata():
|
|||
if current is not None and last is not None and current >= last:
|
||||
break
|
||||
else:
|
||||
# Fallback if no pagination info, just stop if empty data (handled above) or arbitrary limit?
|
||||
# If we got data but no pagination, maybe it's a single page result?
|
||||
# But we loop until no data.
|
||||
pass
|
||||
|
||||
print(f" - Page {page} fetched. Total videos so far: {len(video_map)}", flush=True)
|
||||
log(f" - Page {page} fetched. Total videos so far: {len(video_map)}")
|
||||
page += 1
|
||||
|
||||
except Exception as e:
|
||||
print(f"❌ Error fetching page {page}: {e}", flush=True)
|
||||
# If a page fails, maybe we should stop or retry? For now, let's stop to avoid infinite loops on auth error
|
||||
log(f"❌ Error fetching page {page}: {e}")
|
||||
break
|
||||
|
||||
print(f"✅ Metadata fetch complete. Found {len(video_map)} videos.", flush=True)
|
||||
log(f"✅ Metadata fetch complete. Found {len(video_map)} videos.")
|
||||
return video_map
|
||||
|
||||
def cleanup_old_folders():
|
||||
|
|
@ -88,7 +320,7 @@ def cleanup_old_folders():
|
|||
Scans TARGET_DIR for folders containing '+00:00'.
|
||||
Safely deletes them ONLY if they contain no real files (only symlinks or empty).
|
||||
"""
|
||||
print("🧹 Starting cleanup. Scanning ONLY for folders containing '+00:00'...", flush=True)
|
||||
log("🧹 Starting cleanup. Scanning ONLY for folders containing '+00:00'...")
|
||||
cleaned_count = 0
|
||||
skipped_count = 0
|
||||
|
||||
|
|
@ -123,15 +355,86 @@ def cleanup_old_folders():
|
|||
item.unlink()
|
||||
# Remove directory
|
||||
video_dir.rmdir()
|
||||
print(f" [DELETED] {video_dir.name}", flush=True)
|
||||
log(f" [DELETED] {video_dir.name}")
|
||||
cleaned_count += 1
|
||||
except Exception as e:
|
||||
print(f" ❌ Failed to delete {video_dir.name}: {e}", flush=True)
|
||||
log(f" ❌ Failed to delete {video_dir.name}: {e}")
|
||||
else:
|
||||
print(f" ⚠️ SKIPPING {video_dir.name} - {reason}", flush=True)
|
||||
log(f" ⚠️ SKIPPING {video_dir.name} - {reason}")
|
||||
skipped_count += 1
|
||||
|
||||
print(f"🧹 Cleanup complete. Removed: {cleaned_count}, Skipped: {skipped_count}", flush=True)
|
||||
log(f"🧹 Cleanup complete. Removed: {cleaned_count}, Skipped: {skipped_count}")
|
||||
|
||||
def check_orphaned_links():
    """
    Scans TARGET_DIR for video.* symlinks and checks if they point to valid files.
    For orphaned links, parses the folder structure to extract metadata.
    Stores results in database with status 'missing'.

    Returns:
        list[dict]: one entry per broken symlink found.
    """
    log("🔍 Checking for orphaned symlinks...")
    orphaned = []
    total_checked = 0

    if not TARGET_DIR.exists():
        log("⚠️ Target directory does not exist")
        return orphaned

    with get_db() as conn:
        # Expected layout: TARGET_DIR/<channel>/<"YYYY-MM-DD - Title">/video.<ext>
        for channel_dir in TARGET_DIR.iterdir():
            if not channel_dir.is_dir():
                continue

            channel_name = channel_dir.name

            for video_dir in channel_dir.iterdir():
                if not video_dir.is_dir():
                    continue

                folder_name = video_dir.name

                # Look for video files
                for video_file in video_dir.glob("video.*"):
                    total_checked += 1

                    if video_file.is_symlink():
                        try:
                            # Check if the symlink target exists.
                            # NOTE(review): os.readlink may return a relative
                            # path, in which case .exists() resolves against
                            # the CWD — confirm links are created absolute.
                            target = Path(os.readlink(video_file))

                            if not target.exists():
                                # Parse folder name: "YYYY-MM-DD - Title"
                                parts = folder_name.split(" - ", 1)
                                published = parts[0] if len(parts) > 0 else "unknown"
                                title = parts[1] if len(parts) > 1 else folder_name

                                # Try to extract video ID from symlink target path
                                # (source files are named <video_id>.<ext>).
                                video_id = target.stem if target.stem else "unknown"

                                orphaned.append({
                                    "video_id": video_id,
                                    "path": str(video_file),
                                    "target": str(target),
                                    "folder": folder_name,
                                    "channel": channel_name,
                                    "title": title,
                                    "published": published
                                })

                                # Store in DB
                                conn.execute("""
                                    INSERT OR REPLACE INTO videos
                                    (video_id, title, channel, published, symlink, status)
                                    VALUES (?, ?, ?, ?, ?, 'missing')
                                """, (video_id, title, channel_name, published, str(video_file)))

                                log(f" ⚠️ BROKEN: {folder_name} -> {target}")
                        except Exception as e:
                            log(f" ❌ ERROR: {folder_name}: {e}")

        conn.commit()

    log(f"✅ Check complete. Scanned {total_checked} files, found {len(orphaned)} orphaned symlinks.")
    return orphaned
|
||||
|
||||
# Main logic
|
||||
|
||||
|
|
@ -149,116 +452,231 @@ def process_videos():
|
|||
new_links = 0
|
||||
verified_links = 0
|
||||
|
||||
try:
|
||||
for channel_path in SOURCE_DIR.iterdir():
|
||||
if not channel_path.is_dir():
|
||||
continue
|
||||
for video_file in channel_path.glob("*.*"):
|
||||
video_id = video_file.stem
|
||||
|
||||
# 2. Lookup in local map
|
||||
meta = video_map.get(video_id)
|
||||
if not meta:
|
||||
continue
|
||||
sanitized_channel_name = sanitize(meta["channel_name"])
|
||||
channel_dir = TARGET_DIR / sanitized_channel_name
|
||||
channel_dir.mkdir(parents=True, exist_ok=True)
|
||||
sanitized_title = sanitize(meta["title"])
|
||||
folder_name = f"{meta['published']} - {sanitized_title}"
|
||||
video_dir = channel_dir / folder_name
|
||||
video_dir.mkdir(parents=True, exist_ok=True)
|
||||
actual_file = next(channel_path.glob(f"{video_id}.*"), None)
|
||||
if not actual_file:
|
||||
continue
|
||||
host_path_root = Path("/mnt/user/tubearchives/bp")
|
||||
host_source_path = host_path_root / actual_file.relative_to(SOURCE_DIR)
|
||||
dest_file = video_dir / f"video{actual_file.suffix}"
|
||||
try:
|
||||
if dest_file.exists():
|
||||
if dest_file.is_symlink():
|
||||
current_target = Path(os.readlink(dest_file))
|
||||
if current_target.resolve() != host_source_path.resolve():
|
||||
dest_file.unlink()
|
||||
os.symlink(host_source_path, dest_file)
|
||||
print(f" [FIX] Relinked: {folder_name}", flush=True)
|
||||
new_links += 1
|
||||
else:
|
||||
verified_links += 1
|
||||
else:
|
||||
os.symlink(host_source_path, dest_file)
|
||||
print(f" [NEW] Linked: {folder_name}", flush=True)
|
||||
new_links += 1
|
||||
except Exception:
|
||||
pass
|
||||
processed_videos.append({
|
||||
"video_id": video_id,
|
||||
"title": meta["title"],
|
||||
"channel": meta["channel_name"],
|
||||
"published": meta["published"],
|
||||
"symlink": str(dest_file)
|
||||
})
|
||||
except Exception as e:
|
||||
return str(e)
|
||||
with get_db() as conn:
|
||||
# Clear existing "linked" videos (we'll repopulate)
|
||||
conn.execute("DELETE FROM videos WHERE status = 'linked'")
|
||||
|
||||
print(f"✅ Scan complete. Processed {len(processed_videos)} videos.", flush=True)
|
||||
print(f" - New/Fixed Links: {new_links}", flush=True)
|
||||
print(f" - Verified Links: {verified_links}", flush=True)
|
||||
try:
|
||||
for channel_path in SOURCE_DIR.iterdir():
|
||||
if not channel_path.is_dir():
|
||||
continue
|
||||
for video_file in channel_path.glob("*.*"):
|
||||
video_id = video_file.stem
|
||||
|
||||
# Lookup in local map
|
||||
meta = video_map.get(video_id)
|
||||
if not meta:
|
||||
continue
|
||||
sanitized_channel_name = sanitize(meta["channel_name"])
|
||||
channel_dir = TARGET_DIR / sanitized_channel_name
|
||||
channel_dir.mkdir(parents=True, exist_ok=True)
|
||||
sanitized_title = sanitize(meta["title"])
|
||||
folder_name = f"{meta['published']} - {sanitized_title}"
|
||||
video_dir = channel_dir / folder_name
|
||||
video_dir.mkdir(parents=True, exist_ok=True)
|
||||
actual_file = next(channel_path.glob(f"{video_id}.*"), None)
|
||||
if not actual_file:
|
||||
continue
|
||||
host_path_root = Path("/mnt/user/tubearchives/bp")
|
||||
host_source_path = host_path_root / actual_file.relative_to(SOURCE_DIR)
|
||||
dest_file = video_dir / f"video{actual_file.suffix}"
|
||||
try:
|
||||
if dest_file.exists():
|
||||
if dest_file.is_symlink():
|
||||
current_target = Path(os.readlink(dest_file))
|
||||
if current_target.resolve() != host_source_path.resolve():
|
||||
dest_file.unlink()
|
||||
os.symlink(host_source_path, dest_file)
|
||||
log(f" [FIX] Relinked: {folder_name}")
|
||||
new_links += 1
|
||||
else:
|
||||
verified_links += 1
|
||||
else:
|
||||
os.symlink(host_source_path, dest_file)
|
||||
log(f" [NEW] Linked: {folder_name}")
|
||||
new_links += 1
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Store in database
|
||||
conn.execute("""
|
||||
INSERT OR REPLACE INTO videos
|
||||
(video_id, title, channel, published, symlink, status)
|
||||
VALUES (?, ?, ?, ?, ?, 'linked')
|
||||
""", (video_id, meta["title"], meta["channel_name"],
|
||||
meta["published"], str(dest_file)))
|
||||
|
||||
processed_videos.append({
|
||||
"video_id": video_id,
|
||||
"title": meta["title"],
|
||||
"channel": meta["channel_name"],
|
||||
"published": meta["published"],
|
||||
"symlink": str(dest_file)
|
||||
})
|
||||
except Exception as e:
|
||||
conn.rollback()
|
||||
return str(e)
|
||||
|
||||
conn.commit()
|
||||
|
||||
log(f"✅ Scan complete. Processed {len(processed_videos)} videos.")
|
||||
log(f" - New/Fixed Links: {new_links}")
|
||||
log(f" - Verified Links: {verified_links}")
|
||||
return None
|
||||
|
||||
def scheduler():
    """Background loop: run process_videos() every SCAN_INTERVAL minutes, forever.

    The extracted span contained both the pre-change print() lines and the
    post-change log() lines (diff residue); only the log() version is kept so
    each message is emitted once.
    """
    log(f"🕒 Background scheduler started. Scanning every {SCAN_INTERVAL} minutes.")
    while True:
        log("🔄 Running scheduled scan...")
        process_videos()
        # SCAN_INTERVAL is in minutes; time.sleep takes seconds.
        time.sleep(SCAN_INTERVAL * 60)
|
||||
|
||||
# Flask routes
|
||||
|
||||
@app.before_request
def limit_remote_addr():
    """IP allow-list enforced on every request: abort 403 unless the client
    address matches an entry in ALLOWED_IPS.

    ALLOWED_IPS entries may be single addresses or CIDR networks ("a.b.c.d/n").
    """
    # Skip check for local requests if needed, but generally good to enforce
    client_ip = request.remote_addr
    try:
        ip_obj = ipaddress.ip_address(client_ip)
        allowed = False
        for allowed_ip in ALLOWED_IPS:
            if not allowed_ip: continue
            if "/" in allowed_ip:
                # CIDR entry: membership test against the whole network.
                if ip_obj in ipaddress.ip_network(allowed_ip, strict=False):
                    allowed = True
                    break
            else:
                # Single-address entry: exact match.
                if ip_obj == ipaddress.ip_address(allowed_ip):
                    allowed = True
                    break
        if not allowed:
            log(f"⛔ Access denied for IP: {client_ip}")
            abort(403)
    except ValueError as e:
        # Raised by ip_address/ip_network on malformed input — fail closed.
        log(f"⛔ Invalid IP format: {client_ip}, Error: {e}")
        abort(403)
|
||||
|
||||
@app.route("/")
|
||||
def index():
|
||||
return render_template_string('''
|
||||
<html>
|
||||
<head><title>TA Organizerr</title></head>
|
||||
<body>
|
||||
<h1>TA Organizerr</h1>
|
||||
<form method="post" action="/process">
|
||||
<button type="submit">Process Videos</button>
|
||||
</form>
|
||||
<h2>Processed Videos</h2>
|
||||
<ul>
|
||||
{% for v in videos %}
|
||||
<li>{{v.published}} - {{v.title}} ({{v.channel}}) <br>Symlink: {{v.symlink}}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</body>
|
||||
</html>
|
||||
''', videos=processed_videos)
|
||||
return render_template('dashboard.html')
|
||||
|
||||
@app.route("/process", methods=["POST"])
|
||||
def process():
|
||||
error = process_videos()
|
||||
if error:
|
||||
return f"Error: {error}", 500
|
||||
return render_template_string('''
|
||||
<html>
|
||||
<head><title>TA Organizerr</title></head>
|
||||
<body>
|
||||
<h1>TA Organizerr</h1>
|
||||
<form method="post" action="/process">
|
||||
<button type="submit">Process Videos</button>
|
||||
</form>
|
||||
<h2>Processed Videos</h2>
|
||||
<ul>
|
||||
{% for v in videos %}
|
||||
<li>{{v.published}} - {{v.title}} ({{v.channel}}) <br>Symlink: {{v.symlink}}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</body>
|
||||
</html>
|
||||
''', videos=processed_videos)
|
||||
@app.route("/api/status")
|
||||
def api_status():
|
||||
with get_db() as conn:
|
||||
# Get all videos from DB
|
||||
videos = []
|
||||
for row in conn.execute("SELECT * FROM videos ORDER BY channel, published DESC"):
|
||||
videos.append({
|
||||
"video_id": row["video_id"],
|
||||
"title": row["title"],
|
||||
"channel": row["channel"],
|
||||
"published": row["published"],
|
||||
"symlink": row["symlink"],
|
||||
"status": row["status"]
|
||||
})
|
||||
|
||||
# Calculate stats
|
||||
total = len(videos)
|
||||
linked = sum(1 for v in videos if v["status"] == "linked")
|
||||
missing = sum(1 for v in videos if v["status"] == "missing")
|
||||
|
||||
return jsonify({
|
||||
"total_videos": total,
|
||||
"verified_links": linked,
|
||||
"missing_count": missing,
|
||||
"videos": videos
|
||||
})
|
||||
|
||||
@app.route("/api/videos")
|
||||
def api_videos():
|
||||
return jsonify(processed_videos)
|
||||
@app.route("/api/logs")
|
||||
def api_logs():
|
||||
start = request.args.get('start', 0, type=int)
|
||||
with log_lock:
|
||||
return jsonify({
|
||||
"logs": log_buffer[start:],
|
||||
"next_index": len(log_buffer)
|
||||
})
|
||||
|
||||
@app.route("/api/scan", methods=["POST"])
|
||||
def api_scan():
|
||||
# Run in background to avoid blocking
|
||||
threading.Thread(target=process_videos).start()
|
||||
return jsonify({"status": "started"})
|
||||
|
||||
@app.route("/api/cleanup", methods=["POST"])
|
||||
def api_cleanup():
|
||||
threading.Thread(target=cleanup_old_folders).start()
|
||||
return jsonify({"status": "started"})
|
||||
|
||||
@app.route("/api/check-orphans", methods=["POST"])
|
||||
def api_check_orphans():
|
||||
orphaned = check_orphaned_links()
|
||||
return jsonify({"status": "complete", "orphaned": orphaned, "count": len(orphaned)})
|
||||
|
||||
@app.route("/transcode")
|
||||
def transcode_page():
|
||||
return render_template('transcoding.html')
|
||||
|
||||
@app.route("/api/transcode/videos")
|
||||
def api_transcode_videos():
|
||||
"""Get all videos that need transcoding."""
|
||||
page = request.args.get('page', 1, type=int)
|
||||
per_page = request.args.get('per_page', 100, type=int)
|
||||
offset = (page - 1) * per_page
|
||||
|
||||
with get_db() as conn:
|
||||
# Get total count
|
||||
total = conn.execute("SELECT COUNT(*) as count FROM videos WHERE status = 'missing'").fetchone()['count']
|
||||
|
||||
videos = []
|
||||
for row in conn.execute(
|
||||
"SELECT * FROM videos WHERE status = 'missing' LIMIT ? OFFSET ?",
|
||||
(per_page, offset)
|
||||
):
|
||||
videos.append({
|
||||
"video_id": row["video_id"],
|
||||
"title": row["title"],
|
||||
"channel": row["channel"],
|
||||
"published": row["published"],
|
||||
"symlink": row["symlink"]
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
"videos": videos,
|
||||
"total": total,
|
||||
"page": page,
|
||||
"per_page": per_page,
|
||||
"pages": (total + per_page - 1) // per_page
|
||||
})
|
||||
|
||||
@app.route("/api/transcode/start", methods=["POST"])
|
||||
def api_transcode_start():
|
||||
"""Start transcoding a video."""
|
||||
data = request.get_json()
|
||||
filepath = data.get('filepath')
|
||||
|
||||
if not filepath:
|
||||
return jsonify({"error": "No filepath provided"}), 400
|
||||
|
||||
encoder = detect_encoder()
|
||||
tlog(f"🖥️ Selected encoder: {encoder}")
|
||||
|
||||
# Run in background
|
||||
def run_transcode():
|
||||
transcode_video(filepath, encoder)
|
||||
|
||||
threading.Thread(target=run_transcode).start()
|
||||
return jsonify({"message": "Transcode started", "encoder": encoder})
|
||||
|
||||
@app.route("/api/transcode/logs")
|
||||
def api_transcode_logs():
|
||||
"""Get transcode logs."""
|
||||
start = request.args.get('start', 0, type=int)
|
||||
with transcode_log_lock:
|
||||
return jsonify({
|
||||
"logs": transcode_log_buffer[start:],
|
||||
"next_index": len(transcode_log_buffer)
|
||||
})
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Start scheduler in background thread
|
||||
|
|
|
|||
305
templates/dashboard.html
Normal file
305
templates/dashboard.html
Normal file
|
|
@ -0,0 +1,305 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en" data-bs-theme="dark">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>TA Organizerr Dashboard</title>
|
||||
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.10.0/font/bootstrap-icons.css">
|
||||
<style>
|
||||
.log-box {
|
||||
height: 300px;
|
||||
overflow-y: scroll;
|
||||
font-family: monospace;
|
||||
font-size: 0.9em;
|
||||
background-color: #1e1e1e;
|
||||
color: #00ff00;
|
||||
border: 1px solid #444;
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
.status-dot {
|
||||
height: 10px;
|
||||
width: 10px;
|
||||
background-color: #bbb;
|
||||
border-radius: 50%;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.status-green {
|
||||
background-color: #28a745;
|
||||
}
|
||||
|
||||
.status-yellow {
|
||||
background-color: #ffc107;
|
||||
}
|
||||
|
||||
.status-red {
|
||||
background-color: #dc3545;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="container-fluid p-4">
|
||||
<header class="d-flex justify-content-between align-items-center mb-4">
|
||||
<h1><i class="bi bi-folder2-open"></i> TA Organizerr</h1>
|
||||
<div>
|
||||
<a href="/transcode" class="btn btn-outline-primary me-2">
|
||||
<i class="bi bi-film"></i> Transcode
|
||||
</a>
|
||||
<span class="badge bg-secondary" id="connection-status">Connecting...</span>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<!-- Stats Row -->
|
||||
<div class="row mb-4">
|
||||
<div class="col-md-3">
|
||||
<div class="card text-bg-primary mb-3">
|
||||
<div class="card-header">Total Videos</div>
|
||||
<div class="card-body">
|
||||
<h2 class="card-title" id="stat-total">0</h2>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<div class="card text-bg-success mb-3">
|
||||
<div class="card-header">Linked & Verified</div>
|
||||
<div class="card-body">
|
||||
<h2 class="card-title" id="stat-linked">0</h2>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<div class="card text-bg-warning mb-3">
|
||||
<div class="card-header">New / Fixed</div>
|
||||
<div class="card-body">
|
||||
<h2 class="card-title" id="stat-new">0</h2>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-3">
|
||||
<div class="card text-bg-danger mb-3">
|
||||
<div class="card-header">Missing / Error</div>
|
||||
<div class="card-body">
|
||||
<h2 class="card-title" id="stat-error">0</h2>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Controls & Logs -->
|
||||
<div class="row mb-4">
|
||||
<div class="col-md-4">
|
||||
<div class="card h-100">
|
||||
<div class="card-header">Control Panel</div>
|
||||
<div class="card-body d-grid gap-2">
|
||||
<button class="btn btn-primary btn-lg" onclick="triggerScan()">
|
||||
<i class="bi bi-arrow-repeat"></i> SCAN NOW
|
||||
</button>
|
||||
<button class="btn btn-danger" onclick="triggerCleanup()">
|
||||
<i class="bi bi-trash"></i> CLEAN OLD FOLDERS (+00:00)
|
||||
</button>
|
||||
<button class="btn btn-warning" onclick="checkOrphans()">
|
||||
<i class="bi bi-search"></i> CHECK ORPHANED LINKS
|
||||
</button>
|
||||
<hr>
|
||||
<small class="text-muted">Next scheduled scan in: <span id="next-scan">--</span> min</small>
|
||||
<div id="orphan-results" class="mt-2"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-8">
|
||||
<div class="card h-100">
|
||||
<div class="card-header d-flex justify-content-between">
|
||||
<span>Live Logs</span>
|
||||
<button class="btn btn-sm btn-outline-secondary" onclick="clearLogs()">Clear</button>
|
||||
</div>
|
||||
<div class="card-body p-0">
|
||||
<div class="log-box" id="log-container">
|
||||
<div>Waiting for logs...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card">
|
||||
<div class="card-header d-flex justify-content-between align-items-center">
|
||||
<span>Video Matrix</span>
|
||||
<div class="d-flex gap-2">
|
||||
<select class="form-select form-select-sm" id="status-filter" style="width: 150px;">
|
||||
<option value="">All Status</option>
|
||||
<option value="linked">Linked</option>
|
||||
<option value="missing">Missing</option>
|
||||
</select>
|
||||
<select class="form-select form-select-sm" id="channel-filter" style="width: 200px;">
|
||||
<option value="">All Channels</option>
|
||||
</select>
|
||||
<input type="text" class="form-control form-control-sm" id="search-input"
|
||||
placeholder="Search videos..." style="width: 200px;">
|
||||
</div>
|
||||
</div>
|
||||
<div class="card-body p-0 table-responsive" style="max-height: 500px;">
|
||||
<table class="table table-striped table-hover mb-0">
|
||||
<thead class="table-dark sticky-top">
|
||||
<tr>
|
||||
<th>Status</th>
|
||||
<th>Published</th>
|
||||
<th>Channel</th>
|
||||
<th>Title</th>
|
||||
<th>Video ID</th>
|
||||
<th>Symlink Path</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="video-table-body">
|
||||
<!-- Rows injected by JS -->
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
|
||||
<script>
|
||||
// Poll for status every 2 seconds
|
||||
setInterval(fetchStatus, 2000);
|
||||
// Poll for logs every 1 second
|
||||
setInterval(fetchLogs, 1000);
|
||||
|
||||
let lastLogIndex = 0;
|
||||
|
||||
// Poll /api/status and refresh the connection badge, stat cards, and video table.
async function fetchStatus() {
    try {
        const res = await fetch('/api/status');
        const data = await res.json();

        document.getElementById('connection-status').textContent = 'Connected';
        document.getElementById('connection-status').className = 'badge bg-success';

        // Update stats
        document.getElementById('stat-total').textContent = data.total_videos;
        document.getElementById('stat-linked').textContent = data.verified_links;
        document.getElementById('stat-error').textContent = data.missing_count || 0;

        // Update table if changed (simple check, can be optimized)
        updateTable(data.videos);

    } catch (e) {
        // Any network or parse failure flips the badge to Disconnected.
        document.getElementById('connection-status').textContent = 'Disconnected';
        document.getElementById('connection-status').className = 'badge bg-danger';
    }
}
|
||||
|
||||
async function fetchLogs() {
|
||||
try {
|
||||
const res = await fetch('/api/logs?start=' + lastLogIndex);
|
||||
const data = await res.json();
|
||||
if (data.logs && data.logs.length > 0) {
|
||||
const logBox = document.getElementById('log-container');
|
||||
data.logs.forEach(line => {
|
||||
const div = document.createElement('div');
|
||||
div.textContent = line;
|
||||
logBox.appendChild(div);
|
||||
});
|
||||
logBox.scrollTop = logBox.scrollHeight;
|
||||
lastLogIndex = data.next_index;
|
||||
}
|
||||
} catch (e) { console.error(e); }
|
||||
}
|
||||
|
||||
async function triggerScan() {
|
||||
if (!confirm("Start a full library scan? This may take a moment.")) return;
|
||||
await fetch('/api/scan', { method: 'POST' });
|
||||
alert("Scan started! Watch the logs.");
|
||||
}
|
||||
|
||||
async function triggerCleanup() {
|
||||
if (!confirm("Are you sure you want to delete all folders containing '+00:00'?\n\nSafety check: Real files will NOT be deleted.")) return;
|
||||
await fetch('/api/cleanup', { method: 'POST' });
|
||||
alert("Cleanup started! Watch the logs.");
|
||||
}
|
||||
|
||||
async function checkOrphans() {
|
||||
const resultsDiv = document.getElementById('orphan-results');
|
||||
resultsDiv.innerHTML = '<div class="spinner-border spinner-border-sm" role="status"></div> Checking...';
|
||||
|
||||
const res = await fetch('/api/check-orphans', { method: 'POST' });
|
||||
const data = await res.json();
|
||||
|
||||
if (data.count === 0) {
|
||||
resultsDiv.innerHTML = '<div class="alert alert-success p-2 mb-0 mt-2">✅ No orphaned links found!</div>';
|
||||
} else {
|
||||
resultsDiv.innerHTML = `<div class="alert alert-warning p-2 mb-0 mt-2">⚠️ Found ${data.count} orphaned links. Check logs for details.</div>`;
|
||||
}
|
||||
|
||||
setTimeout(() => { resultsDiv.innerHTML = ''; }, 10000);
|
||||
}
|
||||
|
||||
function clearLogs() {
|
||||
document.getElementById('log-container').innerHTML = '';
|
||||
}
|
||||
|
||||
function updateTable(videos) {
|
||||
const tbody = document.getElementById('video-table-body');
|
||||
const search = document.getElementById('search-input').value.toLowerCase();
|
||||
const channelFilter = document.getElementById('channel-filter').value;
|
||||
const statusFilter = document.getElementById('status-filter').value;
|
||||
|
||||
// Build unique channel list for dropdown
|
||||
const channels = [...new Set(videos.map(v => v.channel))].sort();
|
||||
const channelDropdown = document.getElementById('channel-filter');
|
||||
const currentValue = channelDropdown.value;
|
||||
|
||||
// Only update dropdown if channel list changed
|
||||
if (channelDropdown.options.length !== channels.length + 1) {
|
||||
channelDropdown.innerHTML = '<option value="">All Channels</option>';
|
||||
channels.forEach(ch => {
|
||||
const opt = document.createElement('option');
|
||||
opt.value = ch;
|
||||
opt.textContent = ch;
|
||||
channelDropdown.appendChild(opt);
|
||||
});
|
||||
channelDropdown.value = currentValue;
|
||||
}
|
||||
|
||||
// Clear and rebuild table
|
||||
tbody.innerHTML = '';
|
||||
|
||||
videos.forEach(v => {
|
||||
// Apply filters
|
||||
if (search && !v.title.toLowerCase().includes(search) && !v.video_id.includes(search)) return;
|
||||
if (channelFilter && v.channel !== channelFilter) return;
|
||||
if (statusFilter && v.status !== statusFilter) return;
|
||||
|
||||
const tr = document.createElement('tr');
|
||||
let statusColor = v.status === 'linked' ? 'status-green' : 'status-red';
|
||||
|
||||
tr.innerHTML = `
|
||||
<td><span class="status-dot ${statusColor}"></span></td>
|
||||
<td>${v.published}</td>
|
||||
<td>${v.channel}</td>
|
||||
<td>${v.title}</td>
|
||||
<td><code>${v.video_id}</code></td>
|
||||
<td class="text-truncate" style="max-width: 300px;">${v.symlink || '-'}</td>
|
||||
`;
|
||||
tbody.appendChild(tr);
|
||||
});
|
||||
}
|
||||
|
||||
document.getElementById('search-input').addEventListener('input', () => {
|
||||
fetchStatus();
|
||||
});
|
||||
|
||||
document.getElementById('channel-filter').addEventListener('change', () => {
|
||||
fetchStatus();
|
||||
});
|
||||
|
||||
document.getElementById('status-filter').addEventListener('change', () => {
|
||||
fetchStatus();
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
213
templates/transcoding.html
Normal file
213
templates/transcoding.html
Normal file
|
|
@ -0,0 +1,213 @@
|
|||
<!DOCTYPE html>
<html lang="en" data-bs-theme="dark">

<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Transcode Manager - TA Organizerr</title>
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
    <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.10.0/font/bootstrap-icons.css">
    <style>
        /* Monospace pill used to display codec names */
        .codec-badge {
            font-family: monospace;
            font-size: 0.8em;
        }

        /* Codec status colours: ok = green, warn = yellow (dark text for contrast), bad = red */
        .codec-ok {
            background-color: #28a745;
        }

        .codec-warn {
            background-color: #ffc107;
            color: #000;
        }

        .codec-bad {
            background-color: #dc3545;
        }
    </style>
</head>
|
||||
|
||||
<body>
    <div class="container-fluid p-4">
        <header class="d-flex justify-content-between align-items-center mb-4">
            <h1><i class="bi bi-film"></i> Transcode Manager</h1>
            <a href="/" class="btn btn-secondary"><i class="bi bi-arrow-left"></i> Back to Dashboard</a>
        </header>

        <div class="alert alert-info d-flex justify-content-between align-items-center">
            <div>
                <strong>ℹ️ Info:</strong> This page shows videos with broken/missing source files. Click "Find Missing
                Videos" to scan for orphaned symlinks, then transcode them to restore compatibility.
            </div>
            <button class="btn btn-primary" onclick="findMissing()">
                <i class="bi bi-search"></i> Find Missing Videos
            </button>
        </div>

        <!-- Paginated queue of videos whose source file is broken/missing.
             Rows and pagination are rendered by loadVideos() in the script below. -->
        <div class="card mb-4">
            <div class="card-header d-flex justify-content-between align-items-center">
                <h5>Transcode Queue</h5>
                <div id="pagination-info" class="text-muted"></div>
            </div>
            <div class="card-body">
                <table class="table table-striped">
                    <thead>
                        <tr>
                            <th style="width: 15%;">Channel</th>
                            <th style="width: 10%;">Published</th>
                            <th style="width: 30%;">Title</th>
                            <th style="width: 30%;">Symlink Path</th>
                            <th style="width: 15%;">Actions</th>
                        </tr>
                    </thead>
                    <tbody id="transcode-table">
                        <tr>
                            <td colspan="5" class="text-center">
                                <div class="spinner-border" role="status"></div> Loading...
                            </td>
                        </tr>
                    </tbody>
                </table>
                <nav>
                    <ul class="pagination justify-content-center" id="pagination">
                    </ul>
                </nav>
            </div>
        </div>

        <!-- Live tail of the server-side transcoding log (polled by fetchLogs) -->
        <div class="card">
            <div class="card-header">Transcoding Log</div>
            <div class="card-body">
                <div id="transcode-log"
                    style="height: 300px; overflow-y: scroll; font-family: monospace; font-size: 0.9em; background-color: #1e1e1e; color: #00ff00; padding: 10px;">
                    <div>Waiting for transcode jobs...</div>
                </div>
            </div>
        </div>
    </div>
|
||||
|
||||
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
|
||||
    <script>
        // Cursor into /api/transcode/logs: index of the next log line to
        // request, advanced by fetchLogs() after each successful poll.
        let transcodeLogIndex = 0;
        // Queue page currently displayed (reset to 1 after a fresh scan).
        let currentPage = 1;
||||
|
||||
async function findMissing() {
|
||||
const tbody = document.getElementById('transcode-table');
|
||||
tbody.innerHTML = '<tr><td colspan="7" class="text-center"><div class="spinner-border" role="status"></div> Scanning for orphaned symlinks...</td></tr>';
|
||||
|
||||
try {
|
||||
const res = await fetch('/api/check-orphans', { method: 'POST' });
|
||||
const data = await res.json();
|
||||
alert(`Found ${data.count} videos with broken/missing source files.`);
|
||||
currentPage = 1;
|
||||
loadVideos();
|
||||
} catch (e) {
|
||||
alert("Scan failed: " + e);
|
||||
loadVideos();
|
||||
}
|
||||
}
|
||||
|
||||
async function loadVideos(page = 1) {
|
||||
try {
|
||||
const res = await fetch(`/api/transcode/videos?page=${page}&per_page=100`);
|
||||
const data = await res.json();
|
||||
|
||||
const tbody = document.getElementById('transcode-table');
|
||||
tbody.innerHTML = '';
|
||||
|
||||
if (data.videos.length === 0) {
|
||||
tbody.innerHTML = '<tr><td colspan="5" class="text-center">No missing videos found. Click "Find Missing Videos" to scan.</td></tr>';
|
||||
return;
|
||||
}
|
||||
|
||||
data.videos.forEach(v => {
|
||||
const tr = document.createElement('tr');
|
||||
|
||||
tr.innerHTML = `
|
||||
<td>${v.channel}</td>
|
||||
<td>${v.published}</td>
|
||||
<td class="text-truncate" style="max-width: 200px;" title="${v.title}">${v.title}</td>
|
||||
<td class="text-truncate" style="max-width: 250px;" title="${v.symlink}"><small>${v.symlink}</small></td>
|
||||
<td>
|
||||
<button class="btn btn-sm btn-primary" onclick="transcode('${v.symlink}')">
|
||||
<i class="bi bi-play"></i> Transcode
|
||||
</button>
|
||||
</td>
|
||||
`;
|
||||
tbody.appendChild(tr);
|
||||
});
|
||||
|
||||
// Update pagination info
|
||||
document.getElementById('pagination-info').textContent =
|
||||
`Showing ${data.videos.length} of ${data.total} videos (Page ${data.page}/${data.pages})`;
|
||||
|
||||
// Render pagination
|
||||
const pagination = document.getElementById('pagination');
|
||||
pagination.innerHTML = '';
|
||||
|
||||
if (data.pages > 1) {
|
||||
// Previous button
|
||||
const prevLi = document.createElement('li');
|
||||
prevLi.className = `page-item ${data.page === 1 ? 'disabled' : ''}`;
|
||||
prevLi.innerHTML = `<a class="page-link" href="#" onclick="loadVideos(${data.page - 1}); return false;">Previous</a>`;
|
||||
pagination.appendChild(prevLi);
|
||||
|
||||
// Page numbers (show max 5 pages around current)
|
||||
const startPage = Math.max(1, data.page - 2);
|
||||
const endPage = Math.min(data.pages, data.page + 2);
|
||||
|
||||
for (let i = startPage; i <= endPage; i++) {
|
||||
const li = document.createElement('li');
|
||||
li.className = `page-item ${i === data.page ? 'active' : ''}`;
|
||||
li.innerHTML = `<a class="page-link" href="#" onclick="loadVideos(${i}); return false;">${i}</a>`;
|
||||
pagination.appendChild(li);
|
||||
}
|
||||
|
||||
// Next button
|
||||
const nextLi = document.createElement('li');
|
||||
nextLi.className = `page-item ${data.page === data.pages ? 'disabled' : ''}`;
|
||||
nextLi.innerHTML = `<a class="page-link" href="#" onclick="loadVideos(${data.page + 1}); return false;">Next</a>`;
|
||||
pagination.appendChild(nextLi);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
async function transcode(filepath) {
|
||||
if (!confirm("Start transcoding this video? This may take a while.")) return;
|
||||
|
||||
const res = await fetch('/api/transcode/start', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ filepath })
|
||||
});
|
||||
|
||||
const data = await res.json();
|
||||
alert(data.message || "Transcode started! Check the log below.");
|
||||
}
|
||||
|
||||
async function fetchLogs() {
|
||||
try {
|
||||
const res = await fetch('/api/transcode/logs?start=' + transcodeLogIndex);
|
||||
const data = await res.json();
|
||||
if (data.logs && data.logs.length > 0) {
|
||||
const logBox = document.getElementById('transcode-log');
|
||||
data.logs.forEach(line => {
|
||||
const div = document.createElement('div');
|
||||
div.textContent = line;
|
||||
logBox.appendChild(div);
|
||||
});
|
||||
logBox.scrollTop = logBox.scrollHeight;
|
||||
transcodeLogIndex = data.next_index;
|
||||
}
|
||||
} catch (e) { console.error(e); }
|
||||
}
|
||||
|
||||
        // Initial load of the queue, then poll the transcode log every second.
        loadVideos();
        setInterval(fetchLogs, 1000);
    </script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
Loading…
Add table
Add a link
Reference in a new issue