"""Save.TV Web-UI — Film-Archiv durchsuchen und downloaden.
Läuft auf Port 8765 in CT 116.
Erreichbar via Tailscale: http://100.123.47.7:8765
"""
import os
import re as _re
import subprocess
import sys
import json
import threading
import urllib.request
from datetime import datetime
from pathlib import Path
sys.path.insert(0, os.path.dirname(__file__))
sys.path.insert(0, '/opt')
from flask import Flask, jsonify, render_template_string, request
from tools import savetv, savetv_country_filter
app = Flask(__name__)

# Persistent map of telecast-id -> download status (e.g. "running").
DOWNLOAD_LOG = Path("/mnt/savetv/.download_log.json")
# ── Archive cache (background refresh) ──────────────────────────────────────
_ARCHIVE_CACHE_FILE = Path("/mnt/savetv/.archive_cache.json")
_ARCHIVE_CACHE_TTL = 1800  # seconds (30 min) before a background refresh is triggered
_archive_lock = threading.Lock()  # guards check-and-set of _archive_refreshing
_archive_refreshing = False  # True while a background archive refresh is running
def _load_archive_cache():
    """Read the archive cache from disk — immediate, no network access.

    Returns:
        tuple: (list of archive entries, cache timestamp as float).
        ``([], 0.0)`` when the cache file is missing or unreadable.
    """
    try:
        if _ARCHIVE_CACHE_FILE.exists():
            payload = json.loads(_ARCHIVE_CACHE_FILE.read_text())
            return payload.get("entries", []), float(payload.get("ts", 0))
    except Exception:
        pass  # treat a corrupt cache file like an absent one
    return [], 0.0
def _save_archive_cache(entries):
    """Persist *entries* plus the current timestamp to the archive cache file.

    Best-effort: any write failure (e.g. unavailable mount) is swallowed.
    """
    try:
        payload = {"ts": datetime.now().timestamp(), "entries": entries}
        _ARCHIVE_CACHE_FILE.write_text(json.dumps(payload, ensure_ascii=False))
    except Exception:
        pass
def _refresh_archive_bg():
    """Refresh the Save.TV archive in the background.

    Intended to run in a daemon thread (started by _get_archive_cached).
    A module-level flag prevents concurrent refreshes; the lock protects
    only the check-and-set of that flag, not the slow archive fetch.
    """
    global _archive_refreshing
    with _archive_lock:
        if _archive_refreshing:
            return  # another thread is already refreshing
        _archive_refreshing = True
    try:
        entries = savetv._get_full_archive()
        if entries:
            # Only overwrite the cache with a non-empty result.
            _save_archive_cache(entries)
    except Exception:
        pass  # best-effort: keep serving the stale cache on failure
    finally:
        # NOTE(review): the flag is reset without holding the lock — benign
        # for a plain bool under CPython, but confirm if this grows state.
        _archive_refreshing = False
def _get_archive_cached():
    """Return archive entries from the file cache immediately.

    When the cache is empty or older than _ARCHIVE_CACHE_TTL, a background
    refresh thread is started; the caller never waits on the network.
    """
    entries, ts = _load_archive_cache()
    stale = datetime.now().timestamp() - ts > _ARCHIVE_CACHE_TTL
    if stale or not entries:
        threading.Thread(target=_refresh_archive_bg, daemon=True).start()
    return entries
def _load_download_log():
    """Return the download-log dict; {} when the file is absent or corrupt."""
    try:
        # EAFP: a missing file raises and falls through just like bad JSON.
        return json.loads(DOWNLOAD_LOG.read_text())
    except Exception:
        return {}
def _save_download_log(log):
    """Best-effort write of the download log; all errors are ignored."""
    try:
        serialized = json.dumps(log, ensure_ascii=False, indent=2)
        DOWNLOAD_LOG.write_text(serialized)
    except Exception:
        pass
# Metadata for in-flight downloads (shared with savetv_extra_routes).
DOWNLOAD_PROGRESS = Path("/mnt/savetv/.download_progress.json")
# Target directory for finished .mp4 files.
DOWNLOAD_DIR = Path("/mnt/savetv")
# Serializes read-modify-write cycles on the progress file.
_PROGRESS_LOCK = threading.Lock()
def _load_progress_raw():
    """Read the progress file WITHOUT locking (call only under _PROGRESS_LOCK)."""
    try:
        # EAFP: missing file and corrupt JSON both fall through to {}.
        return json.loads(DOWNLOAD_PROGRESS.read_text())
    except Exception:
        return {}
def _save_progress_raw(prog):
    """Write the progress file WITHOUT locking (call only under _PROGRESS_LOCK).

    Unlike the other savers this one propagates errors; _save_progress wraps it.
    """
    serialized = json.dumps(prog, ensure_ascii=False, indent=2)
    DOWNLOAD_PROGRESS.write_text(serialized)
def _load_progress():
    """Thread-safe read of the download-progress file."""
    with _PROGRESS_LOCK:
        return _load_progress_raw()
def _save_progress(prog):
    """Thread-safe, best-effort write of the download-progress file."""
    with _PROGRESS_LOCK:
        try:
            _save_progress_raw(prog)
        except Exception:
            pass  # progress display is non-critical; never fail the caller
def _head_content_length(url):
try:
req = urllib.request.Request(url, method='HEAD')
with urllib.request.urlopen(req, timeout=10) as resp:
return int(resp.headers.get('Content-Length', 0))
except Exception:
return 0
# Inline template for the single-page UI, rendered at "/".
# NOTE(review): this looks like a truncated placeholder (no HTML tags) —
# confirm the full markup hasn't been lost.
HTML = r"""
Save.TV Archiv
Lade Archiv von Save.TV...
"""
@app.route("/")
def index():
    """Serve the single-page UI."""
    return render_template_string(HTML)
@app.route("/api/films")
def api_films():
    """List deduplicated, filtered films from the Save.TV archive as JSON.

    Skips series episodes (SFOLGE set), excluded titles, and titles filtered
    by production country. When the same film aired more than once, the
    airing with the most days left before deletion wins. Also reports which
    titles already exist as .mp4 files on disk so the UI can mark them.
    """
    # NOTE(review): fetches the archive synchronously; _get_archive_cached
    # exists but is unused here — confirm whether the cached path is intended.
    entries = savetv._get_full_archive()
    seen_titles = {}
    series_count = 0
    excluded_count = 0
    filminfo_cache = savetv_country_filter.load_filminfo_cache()
    for e in entries:
        tc = e.get("STRTELECASTENTRY", {})
        if tc.get("SFOLGE", ""):  # episode number set -> series, skip
            series_count += 1
            continue
        title = tc.get("STITLE", "?")
        if savetv._is_excluded(title):
            excluded_count += 1
            continue
        if savetv_country_filter.should_exclude_production_country(title, filminfo_cache):
            excluded_count += 1
            continue
        station = tc.get("STVSTATIONNAME", "?")
        days_left = int(tc.get("IDAYSLEFTBEFOREDELETE", 0))
        tid = int(tc.get("ITELECASTID", 0))
        start_date = tc.get("DSTARTDATE", "")
        is_cinema = savetv._is_known_cinema(title)
        key = title.lower().strip()
        if key in seen_titles:
            # Duplicate airing: keep the one that stays available longest.
            if days_left > seen_titles[key]["days_left"]:
                seen_titles[key].update(days_left=days_left, tid=tid,
                                        start_date=start_date)
            continue
        seen_titles[key] = {
            "tid": tid, "title": title, "station": station,
            "days_left": days_left, "cinema": is_cinema,
            "start_date": start_date,
        }
    films = sorted(seen_titles.values(),
                   key=lambda x: x.get("start_date", ""), reverse=True)
    dl_log = _load_download_log()

    # Normalize titles for fuzzy matching against files already on disk.
    # Fix: dropped the redundant local `import re as _re` (already imported
    # at module level) and hoisted the compiled patterns out of the helper.
    strip_punct = _re.compile(r'[^\w\s]')
    collapse_ws = _re.compile(r'\s+')

    def _norm_title(s):
        return collapse_ws.sub(' ', strip_punct.sub(' ', s)).strip().lower()

    stored_norms = set()
    # Fix: guard against a missing download dir and files vanishing between
    # iterdir() and stat() — previously either case 500'd the endpoint.
    if DOWNLOAD_DIR.is_dir():
        for fp in DOWNLOAD_DIR.iterdir():
            try:
                if fp.suffix == ".mp4" and fp.stat().st_size > 1_000_000:
                    stored_norms.add(_norm_title(fp.stem.rsplit(" (", 1)[0]))
            except OSError:
                continue
    return jsonify({
        "films": films,
        "total": len(entries),
        "kino": sum(1 for f in films if f["cinema"]),
        "urgent": sum(1 for f in films if f["days_left"] <= 7),
        "downloads": dl_log,
        "stored_titles": list(stored_norms),
    })
@app.route("/api/download", methods=["POST"])
def api_download():
    """Start background wget downloads for the requested telecast IDs.

    Expects JSON ``{"tids": [int, ...]}``. For each tid the download URL is
    resolved (falling back to SD format when the helper supports it),
    progress metadata is recorded, and a detached ``wget`` is spawned so the
    download survives this request. Returns per-tid results as JSON.
    """
    data = request.get_json()
    tids = data.get("tids", [])
    dl_log = _load_download_log()
    # Fix: removed two dead locals from the original (`progress` was loaded
    # but never used; `results = []` was immediately shadowed below).
    films_by_tid = {}
    entries = savetv._get_full_archive()
    for e in entries:
        tc = e.get("STRTELECASTENTRY", {})
        tid = int(tc.get("ITELECASTID", 0))
        if tid:
            films_by_tid[tid] = tc.get("STITLE", f"film_{tid}")

    def download_one(tid):
        """Resolve one telecast's URL, record progress, launch detached wget."""
        title = films_by_tid.get(tid, f"film_{tid}")
        url, err = savetv._get_download_url(tid)
        if err:
            # Retry in SD quality if the helper exposes that format.
            try:
                url, err = savetv._get_download_url(tid, fmt=savetv.DOWNLOAD_FORMAT_SD)
            except AttributeError:
                pass
        if err:
            return {"tid": tid, "ok": False, "error": f"URL-Fehler: {err}"}
        expected_bytes = _head_content_length(url)
        safe_title = _re.sub(r'[^\w\-.]', '_', title)[:80]
        filename = f"{safe_title}_{tid}.mp4"
        target = DOWNLOAD_DIR / filename
        with _PROGRESS_LOCK:
            cur_progress = _load_progress_raw()
            cur_progress[str(tid)] = {
                "filename": filename,
                "expected_bytes": expected_bytes,
                "started_at": datetime.now().isoformat(),
            }
            _save_progress_raw(cur_progress)
        DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True)
        try:
            # start_new_session detaches wget from this process group so the
            # download keeps running after the request (or server) finishes.
            subprocess.Popen(
                ["wget", "-q", "-O", str(target), url],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                start_new_session=True,
            )
        except Exception as e:
            # Roll back the progress entry we just wrote.
            with _PROGRESS_LOCK:
                cur_progress = _load_progress_raw()
                cur_progress.pop(str(tid), None)
                _save_progress_raw(cur_progress)
            return {"tid": tid, "ok": False, "error": f"wget: {e}"}
        # Single dict assignment — atomic under CPython, safe across workers.
        dl_log[str(tid)] = "running"
        return {"tid": tid, "ok": True, "filename": filename}

    result_list = [None] * len(tids)

    def worker(i, tid):
        result_list[i] = download_one(tid)

    threads = []
    for i, tid in enumerate(tids):
        t = threading.Thread(target=worker, args=(i, tid), daemon=True)
        t.start()
        threads.append(t)
    for t in threads:
        # Bounded join: a slow URL lookup must not hang the request forever;
        # unfinished workers simply yield no result entry.
        t.join(timeout=30)
    results = [r for r in result_list if r]
    _save_download_log(dl_log)
    return jsonify({"results": results})
# Extra routes (downloads, status, health) — local to /opt/savetv_extra_routes.py
try:
    from savetv_extra_routes import register_extra_routes
    register_extra_routes(app,
                          progress_lock=_PROGRESS_LOCK,
                          load_progress_raw=_load_progress_raw,
                          save_progress_raw=_save_progress_raw)
except ImportError:
    # Optional module; the core UI works without the extra routes.
    pass
if __name__ == "__main__":
    # Bind on all interfaces so the Tailscale address works; threaded so the
    # archive listing and download requests don't block each other.
    app.run(host="0.0.0.0", port=8765, debug=False, threaded=True)