- /downloads ersetzt durch Pipeline-Dashboard - /api/pipeline: Status aller Filme (pending/bereit/auf NAS) - /api/nas_synced: Callback, wenn der Jellyfin-Sync fertig ist - Sync-Script meldet sich nach Erfolg zurück ans CT
812 lines
34 KiB
Python
812 lines
34 KiB
Python
"""Extra Routes fuer savetv_web.py - nicht im Git, lokal in CT 116."""
|
||
|
||
import html as _html
|
||
import json as _json
|
||
import os as _os
|
||
import re as _re
|
||
import shutil
|
||
import time as _time
|
||
from pathlib import Path
|
||
from urllib.parse import quote as _urlquote
|
||
from flask import send_from_directory, request, jsonify
|
||
|
||
# Root directory that holds the downloaded .mp4 files and all state files.
SAVETV_DIR = Path("/mnt/savetv")

# On-disk cache for the Jellyfin movie-title list.
JELLYFIN_CACHE = SAVETV_DIR / ".jellyfin_cache.json"
# Cache lifetime in seconds (1 hour).
JELLYFIN_TTL = 3600
JELLYFIN_URL = _os.environ.get("JELLYFIN_URL", "http://100.77.105.3:8096")
JELLYFIN_USER = _os.environ.get("JELLYFIN_USER", "admin")
# SECURITY NOTE(review): real-looking credentials are hard-coded as env-var
# fallbacks here and below — consider requiring the env vars instead.
JELLYFIN_PASS = _os.environ.get("JELLYFIN_PASS", "astral66")

# Direct download without Cloudflare (Hetzner :9443 -> nginx -> CT 116)
SAVETV_DIRECT_BASE = _os.environ.get("SAVETV_DIRECT_BASE", "http://138.201.84.95:9443")
SAVETV_TUNNEL_BASE = _os.environ.get("SAVETV_TUNNEL_BASE", "https://savetv.orbitalo.net")
SAVETV_DIRECT_USER = _os.environ.get("SAVETV_DIRECT_USER", "mike")
SAVETV_DIRECT_PASS = _os.environ.get("SAVETV_DIRECT_PASS", "astral66")
|
||
|
||
|
||
def _normalize_film_title(s: str) -> str:
|
||
"""Gleiche Logik wie normTitle() in savetv_web.js fuer Abgleich."""
|
||
if not s:
|
||
return ""
|
||
s = _re.sub(r"[^\w\s]", " ", s, flags=_re.UNICODE)
|
||
s = _re.sub(r"\s+", " ", s).strip().lower()
|
||
return s
|
||
|
||
|
||
def _strip_trailing_year_in_parens(name: str) -> str:
|
||
return _re.sub(r"\s*\(\d{4}\)\s*$", "", name or "").strip()
|
||
|
||
|
||
def _jellyfin_fetch_library():
    """Fetch all movie titles from Jellyfin, normalized, with a 1h file cache.

    Returns a dict with:
        normalized_titles: sorted list of normalized movie titles
        count: number of distinct titles
        cached: True when the result came from the on-disk cache file
    """
    import requests as _rq

    now = _time.time()
    # Serve from the JSON file cache while it is younger than JELLYFIN_TTL.
    if JELLYFIN_CACHE.exists():
        try:
            data = _json.loads(JELLYFIN_CACHE.read_text())
            if now - float(data.get("ts", 0)) < JELLYFIN_TTL:
                return {
                    "normalized_titles": data.get("normalized_titles", []),
                    "count": int(data.get("count", 0)),
                    "cached": True,
                }
        except Exception:
            # Corrupt/unreadable cache -> fall through to a fresh fetch.
            pass

    # Authenticate against Jellyfin to obtain an access token.
    r = _rq.post(
        f"{JELLYFIN_URL}/Users/AuthenticateByName",
        headers={
            "Content-Type": "application/json",
            "X-Emby-Authorization": 'MediaBrowser Client="SaveTV", Device="CT116", DeviceId="savetv-jf", Version="1.0"',
        },
        json={"Username": JELLYFIN_USER, "Pw": JELLYFIN_PASS},
        timeout=30,
    )
    r.raise_for_status()
    token = r.json()["AccessToken"]

    # Fetch all movies recursively in one request (hard cap of 10000 items).
    r2 = _rq.get(
        f"{JELLYFIN_URL}/Items",
        params={
            "IncludeItemTypes": "Movie",
            "Recursive": "true",
            "Fields": "ProductionYear",
            "Limit": 10000,
            "StartIndex": 0,
        },
        headers={"X-Emby-Token": token},
        timeout=120,
    )
    r2.raise_for_status()
    payload = r2.json()
    items = payload.get("Items", [])
    normalized = set()
    for it in items:
        name = it.get("Name") or ""
        if not name:
            continue
        # Strip a "(YYYY)" suffix before normalizing so titles compare cleanly.
        clean = _strip_trailing_year_in_parens(name)
        key = _normalize_film_title(clean)
        if key:
            normalized.add(key)

    sorted_list = sorted(normalized)
    out = {"ts": now, "normalized_titles": sorted_list, "count": len(normalized)}
    # Best-effort cache write; failures are non-fatal.
    try:
        JELLYFIN_CACHE.write_text(_json.dumps(out, ensure_ascii=False, indent=2))
    except Exception:
        pass

    return {"normalized_titles": sorted_list, "count": len(normalized), "cached": False}
|
||
|
||
|
||
def register_extra_routes(app, progress_lock=None, load_progress_raw=None, save_progress_raw=None):
    """Register the extra Flask routes on *app*.

    Parameters:
        app: Flask application the routes are attached to.
        progress_lock: optional lock guarding the download-progress file;
            a private threading.Lock is created when omitted.
        load_progress_raw / save_progress_raw: optional callables to
            load/store the progress dict; when omitted the functions fall
            back to reading/writing /mnt/savetv/.download_progress.json.
    """
    import threading as _threading
    _plock = progress_lock if progress_lock is not None else _threading.Lock()

    def _load_prog():
        # Load the download-progress mapping (tid -> info dict); {} when absent.
        if load_progress_raw is not None:
            return load_progress_raw()
        pf = SAVETV_DIR / ".download_progress.json"
        import json as _j
        return _j.loads(pf.read_text()) if pf.exists() else {}

    def _save_prog(prog):
        # Persist the download-progress mapping via the injected saver or directly.
        if save_progress_raw is not None:
            save_progress_raw(prog)
        else:
            import json as _j
            (SAVETV_DIR / ".download_progress.json").write_text(_j.dumps(prog, ensure_ascii=False, indent=2))
|
||
|
||
@app.route("/files/<path:filename>")
def serve_file(filename):
    """Serve one file from SAVETV_DIR as an attachment download.

    send_from_directory performs the path-safety check against SAVETV_DIR.
    """
    return send_from_directory(str(SAVETV_DIR), filename, as_attachment=True)
|
||
|
||
@app.route("/api/delete", methods=["POST"])
def api_delete():
    """Delete a single file from SAVETV_DIR.

    Expects JSON {"filename": "..."}; the name must be a bare basename
    (no path separators, no "..") so callers cannot escape SAVETV_DIR.
    Returns {"ok": True, "deleted": name} or an error with 400/404/500.
    """
    # get_json() without silent=True returns None (or raises) for a missing
    # or malformed JSON body, which crashed on .get(); silent=True + `or {}`
    # matches the pattern already used by api_nas_synced.
    data = request.get_json(silent=True) or {}
    filename = data.get("filename", "")
    # Reject traversal attempts and anything that is not a plain filename
    # (backslash included, to be safe against Windows-style separators).
    if not filename or ".." in filename or "/" in filename or "\\" in filename:
        return jsonify({"ok": False, "error": "Ungueltig"}), 400
    target = SAVETV_DIR / filename
    if not target.exists():
        return jsonify({"ok": False, "error": "Nicht gefunden"}), 404
    try:
        target.unlink()
        return jsonify({"ok": True, "deleted": filename})
    except Exception as e:
        return jsonify({"ok": False, "error": str(e)}), 500
|
||
|
||
@app.route("/api/jellyfin_library")
def api_jellyfin_library():
    """Expose the (cached) Jellyfin movie library as JSON.

    On failure an empty library plus the error message is returned with 500
    so the frontend can degrade gracefully.
    """
    try:
        lib = _jellyfin_fetch_library()
        payload = {
            "ok": True,
            "normalized_titles": lib["normalized_titles"],
            "count": lib["count"],
            "cached": lib.get("cached", False),
        }
        return jsonify(payload)
    except Exception as exc:
        error_payload = {
            "ok": False,
            "error": str(exc),
            "normalized_titles": [],
            "count": 0,
            "cached": False,
        }
        return jsonify(error_payload), 500
|
||
|
||
# State file mapping filename -> ISO timestamp of the completed NAS sync.
NAS_DONE_FILE = SAVETV_DIR / ".nas_done.json"
# Minimum file age (hours) before the NAS sync is expected to pick it up.
NAS_SYNC_MIN_H = 12.0
NAS_SYNC_JITTER_H = 0.5  # 0-30 min

def _load_nas_done():
    """Read the NAS-done mapping; any problem yields an empty dict."""
    try:
        if not NAS_DONE_FILE.exists():
            return {}
        return _json.loads(NAS_DONE_FILE.read_text())
    except Exception:
        return {}

def _save_nas_done(mapping):
    """Best-effort write of the NAS-done mapping; errors are swallowed."""
    try:
        serialized = _json.dumps(mapping, ensure_ascii=False, indent=2)
        NAS_DONE_FILE.write_text(serialized)
    except Exception:
        pass
|
||
|
||
@app.route("/api/nas_synced", methods=["POST"])
def api_nas_synced():
    """Callback from the NAS sync script: mark one file as transferred."""
    from datetime import datetime as _dt
    payload = request.get_json(silent=True) or {}
    name = payload.get("name", "").strip()
    if not name:
        return jsonify({"ok": False}), 400
    # Record the completion time with second precision.
    done = _load_nas_done()
    done[name] = _dt.now().strftime("%Y-%m-%dT%H:%M:%S")
    _save_nas_done(done)
    return jsonify({"ok": True})
|
||
|
||
@app.route("/api/pipeline")
def api_pipeline():
    """Pipeline status: films pending NAS sync, ready on Hetzner, transferred.

    A .mp4 file counts as "pending" until it is NAS_SYNC_MIN_H hours old;
    files already reported via /api/nas_synced are listed as "transferred"
    (most recent 20) and skipped in the scan.
    """
    now = _time.time()
    done = _load_nas_done()
    hetzner, pending, transferred = [], [], []
    try:
        for fp in SAVETV_DIR.iterdir():
            if fp.suffix != ".mp4":
                continue
            st = fp.stat()
            # Ignore tiny files (< 1 MB): partial or broken downloads.
            if st.st_size < 1_000_000:
                continue
            age_h = (now - st.st_mtime) / 3600
            size_mb = round(st.st_size / 1024 / 1024, 1)
            name = fp.name
            clean = name.rsplit(".", 1)[0]
            if name in done:
                continue  # already transferred to the NAS
            eta_h = max(0, NAS_SYNC_MIN_H - age_h)
            entry = {"name": name, "clean": clean,
                     "size_mb": size_mb, "age_h": round(age_h, 2),
                     "mtime": int(st.st_mtime)}
            if eta_h > 0:
                entry["eta_h"] = round(eta_h, 2)
                pending.append(entry)
            else:
                hetzner.append(entry)
    except Exception as e:
        # Best effort: a scan error must not break the dashboard, but it
        # should at least be visible in the log (was silently swallowed).
        import logging as _log
        _log.getLogger("savetv").warning("Pipeline-Scan fehlgeschlagen: %s", e)
    hetzner.sort(key=lambda x: x["mtime"], reverse=True)
    pending.sort(key=lambda x: x.get("eta_h", 0))
    # Show the 20 most recently synced files.
    for name, ts in sorted(done.items(), key=lambda x: x[1], reverse=True)[:20]:
        clean = name.rsplit(".", 1)[0]
        transferred.append({"name": name, "clean": clean, "synced_at": ts})
    return jsonify({"hetzner": hetzner, "pending": pending, "transferred": transferred,
                    "total_gb": round(sum(f["size_mb"] for f in hetzner + pending) / 1024, 2)})
|
||
|
||
@app.route("/downloads")
def downloads_page():
    """Render the pipeline dashboard (pending / ready on Hetzner / on NAS).

    Static HTML with inline JS that polls /api/pipeline every 30 seconds.
    Two fixes in the delete-button markup: the title attribute now contains
    a real "ö" (HTML does not interpret JS \\u escapes), and the double
    quotes emitted by JSON.stringify() are escaped as &quot; so they no
    longer terminate the onclick attribute and break delFile().
    """
    return '''<!DOCTYPE html>
<html lang="de">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width,initial-scale=1">
<title>SaveTV Pipeline</title>
<style>
*{box-sizing:border-box;margin:0;padding:0}
body{background:#0e0e16;color:#e0e0ec;font-family:system-ui,-apple-system,sans-serif;font-size:15px;line-height:1.5;padding:24px 20px}
nav{display:flex;gap:16px;margin-bottom:28px;flex-wrap:wrap}
nav a{color:#666;text-decoration:none;font-size:14px;padding:6px 12px;border-radius:6px;border:1px solid #2a2a3a;transition:.15s}
nav a:hover{color:#ccc;border-color:#444}
nav a.active{color:#4caf92;border-color:#4caf92}
h1{font-size:24px;font-weight:700;margin-bottom:4px}
.meta{color:#555;font-size:13px;margin-bottom:28px}
.block{margin-bottom:28px;border-radius:10px;overflow:hidden;border:1px solid #1e1e2e}
.block-header{display:flex;align-items:center;gap:10px;padding:14px 18px;font-weight:700;font-size:15px}
.block-header .count{margin-left:auto;font-size:13px;font-weight:400;color:#666}
.bh-green{background:#0d1f14;border-bottom:1px solid #1e2e20}
.bh-orange{background:#1f1a0d;border-bottom:1px solid #2e260e}
.bh-blue{background:#0d1220;border-bottom:1px solid #1a2030}
.film{display:flex;align-items:center;gap:12px;padding:12px 18px;border-bottom:1px solid #16161e;transition:.12s}
.film:last-child{border-bottom:none}
.film:hover{background:#131320}
.film-name{flex:1;font-size:15px;font-weight:500;min-width:0;overflow:hidden;text-overflow:ellipsis;white-space:nowrap}
.film-meta{font-size:12px;color:#555;white-space:nowrap;flex-shrink:0}
.badge{font-size:11px;font-weight:700;padding:3px 8px;border-radius:4px;white-space:nowrap;flex-shrink:0}
.badge-green{background:#0d2a18;color:#4caf92;border:1px solid #2a5a3a}
.badge-orange{background:#2a1e08;color:#ffa726;border:1px solid #5a3a10}
.badge-blue{background:#0a1830;color:#5a9af8;border:1px solid #1a3060}
.empty{padding:20px 18px;color:#444;font-style:italic;font-size:14px}
.spinner{display:inline-block;width:14px;height:14px;border:2px solid #333;border-top-color:#4caf92;border-radius:50%;animation:spin .8s linear infinite;vertical-align:middle;margin-right:6px}
@keyframes spin{to{transform:rotate(360deg)}}
.del-btn{background:none;border:none;color:#444;cursor:pointer;font-size:16px;padding:2px 6px;border-radius:4px;transition:.12s;flex-shrink:0}
.del-btn:hover{color:#ff5555;background:#2a1010}
.refresh{color:#444;font-size:12px;margin-left:auto}
</style>
</head>
<body>
<nav>
<a href="/">← Archiv</a>
<a href="/downloads" class="active">▶ Pipeline</a>
<a href="/status">⚙ Status</a>
</nav>
<h1>▶ SaveTV Pipeline</h1>
<div class="meta" id="meta">Lade...</div>

<div class="block">
<div class="block-header bh-orange">
<span>⏰ Wartet auf NAS-Transfer</span>
<span class="count" id="cnt-pending">–</span>
</div>
<div id="list-pending"><div class="empty">Lade...</div></div>
</div>

<div class="block">
<div class="block-header bh-green">
<span>💾 Auf Hetzner bereit</span>
<span class="count" id="cnt-hetzner">–</span>
</div>
<div id="list-hetzner"><div class="empty">Lade...</div></div>
</div>

<div class="block">
<div class="block-header bh-blue">
<span>✓ Auf deiner NAS</span>
<span class="count" id="cnt-done">–</span>
</div>
<div id="list-done"><div class="empty">Lade...</div></div>
</div>

<script>
function fmtEta(h){
if(h<=0)return'bereit';
var t=Math.round(h*60);
if(t<60)return'noch '+t+' min';
return'noch '+(h).toFixed(1).replace('.',',')+' h';
}
function fmtSize(mb){
if(mb>=1024)return(mb/1024).toFixed(1)+' GB';
return mb+' MB';
}
function fmtTs(ts){
if(!ts)return'';
var d=new Date(ts);
return d.toLocaleDateString('de-DE',{day:'2-digit',month:'2-digit'})+' '+
d.toLocaleTimeString('de-DE',{hour:'2-digit',minute:'2-digit'});
}
function delFile(name,btn){
if(!confirm('Wirklich löschen?\\n'+name))return;
fetch('/api/delete',{method:'POST',headers:{'Content-Type':'application/json'},
body:JSON.stringify({filename:name})})
.then(r=>r.json()).then(d=>{
if(d.ok)btn.closest('.film').remove();
else alert(d.error);
});
}
function load(){
fetch('/api/pipeline').then(r=>r.json()).then(d=>{
var total=d.hetzner.length+d.pending.length;
document.getElementById('meta').textContent=
total+' Filme auf Hetzner \u00b7 '+d.total_gb+' GB \u00b7 '+
'aktualisiert '+new Date().toLocaleTimeString('de-DE',{hour:'2-digit',minute:'2-digit'});

// Pending
document.getElementById('cnt-pending').textContent=d.pending.length+' Film'+(d.pending.length!==1?'e':'');
var hp=document.getElementById('list-pending');
if(!d.pending.length){hp.innerHTML='<div class="empty">Keine Filme warten auf Transfer</div>';}
else{hp.innerHTML=d.pending.map(f=>
'<div class="film">'+
'<div class="film-name">'+f.clean+'</div>'+
'<div class="film-meta">'+fmtSize(f.size_mb)+'</div>'+
'<span class="badge badge-orange">'+fmtEta(f.eta_h)+'</span>'+
'</div>'
).join('');}

// Hetzner bereit
document.getElementById('cnt-hetzner').textContent=d.hetzner.length+' Film'+(d.hetzner.length!==1?'e':'');
var hh=document.getElementById('list-hetzner');
if(!d.hetzner.length){hh.innerHTML='<div class="empty">Keine Filme bereit (Transfer l\u00e4uft gleich)</div>';}
else{hh.innerHTML=d.hetzner.map(f=>
'<div class="film">'+
'<div class="film-name">'+f.clean+'</div>'+
'<div class="film-meta">'+fmtSize(f.size_mb)+' \u00b7 '+f.age_h.toFixed(1)+' h alt</div>'+
'<span class="badge badge-green">Bereit</span>'+
'<button class="del-btn" title="Löschen" onclick="delFile('+JSON.stringify(f.name).replace(/"/g,'&quot;')+',this)">🗑</button>'+
'</div>'
).join('');}

// NAS done
document.getElementById('cnt-done').textContent=d.transferred.length+' Film'+(d.transferred.length!==1?'e':'');
var hd=document.getElementById('list-done');
if(!d.transferred.length){hd.innerHTML='<div class="empty">Noch keine Filme auf NAS angekommen</div>';}
else{hd.innerHTML=d.transferred.map(f=>
'<div class="film">'+
'<div class="film-name">'+f.clean+'</div>'+
'<div class="film-meta">'+fmtTs(f.synced_at)+'</div>'+
'<span class="badge badge-blue">✓ NAS</span>'+
'</div>'
).join('');}
}).catch(e=>{
document.getElementById('meta').textContent='Fehler beim Laden: '+e;
});
}
load();
setInterval(load,30000);
</script>
</body>
</html>'''
|
||
|
||
|
||
FILMINFO_CACHE = Path("/mnt/savetv/.filminfo_cache.json")
|
||
BOGUS_GENRES = {"Stummfilm", "Tonfilm", "Farbfilm", "Schwarzweissfilm",
|
||
"Langfilm", "Kurzfilm", "Independentfilm"}
|
||
|
||
def _load_filminfo_cache():
|
||
if FILMINFO_CACHE.exists():
|
||
try:
|
||
import json as _json
|
||
return _json.loads(FILMINFO_CACHE.read_text())
|
||
except Exception:
|
||
pass
|
||
return {}
|
||
|
||
def _save_filminfo_cache(cache):
|
||
import json as _json
|
||
FILMINFO_CACHE.write_text(_json.dumps(cache, ensure_ascii=False, indent=1))
|
||
|
||
def _wikidata_lookup(title):
    """Lookup year/genre/country for a film title via Wikidata.

    Two passes: (1) exact rdfs:label SPARQL match, German then English;
    (2) fallback via the wbsearchentities API plus a per-entity SPARQL
    query. Always returns {"year": str, "genres": list, "countries": list};
    all fields are empty when nothing was found or every request failed.
    """
    import requests as _rq
    import re

    # Drop a dash-separated subtitle ("Titel - Untertitel" -> "Titel").
    search_title = re.sub(r"\s*[-\u2013\u2014]\s*.+$", "", title).strip()
    result = {"year": "", "genres": [], "countries": []}

    def _parse_bindings(bindings):
        # Collapse SPARQL rows into (first year, up to 3 genres, up to 2 countries).
        year = ""
        genres = set()
        countries = set()
        for b in bindings:
            if not year and b.get("year", {}).get("value"):
                year = b["year"]["value"]
            if b.get("genreLabel", {}).get("value"):
                genres.add(b["genreLabel"]["value"])
            if b.get("countryLabel", {}).get("value"):
                countries.add(b["countryLabel"]["value"])
        return year, sorted(genres)[:3], sorted(countries)[:2]

    # Pass 1: exact label match (Q11424 = film).
    for lang in ["de", "en"]:
        sparql = ('SELECT ?year ?genreLabel ?countryLabel WHERE {{ '
                  '?film wdt:P31 wd:Q11424 . '
                  '?film rdfs:label "{t}"@{l} . '
                  'OPTIONAL {{ ?film wdt:P577 ?date }} '
                  'OPTIONAL {{ ?film wdt:P136 ?genre }} '
                  'OPTIONAL {{ ?film wdt:P495 ?country }} '
                  'BIND(YEAR(?date) AS ?year) '
                  'SERVICE wikibase:label {{ bd:serviceParam wikibase:language "de,en" }} '
                  '}} LIMIT 20').format(t=search_title.replace('"', '\\"'), l=lang)
        try:
            r = _rq.get("https://query.wikidata.org/sparql",
                        params={"query": sparql, "format": "json"},
                        headers={"User-Agent": "SaveTV/1.0"}, timeout=8)
            bindings = r.json().get("results", {}).get("bindings", [])
            if bindings:
                y, g, c = _parse_bindings(bindings)
                return {"year": y, "genres": [x for x in g if x not in BOGUS_GENRES], "countries": c}
        except Exception:
            # Network/parse failure for this language -> try the next one.
            pass

    # Pass 2 (fallback): fuzzy entity search, then query each candidate QID.
    try:
        sr = _rq.get("https://www.wikidata.org/w/api.php",
                     params={"action": "wbsearchentities", "search": search_title,
                             "language": "de", "type": "item", "limit": "3", "format": "json"},
                     headers={"User-Agent": "SaveTV/1.0"}, timeout=8)
        for item in sr.json().get("search", []):
            qid = item.get("id", "")
            sparql_q = ('SELECT ?year ?genreLabel ?countryLabel WHERE {{ '
                        'BIND(wd:{qid} AS ?film) '
                        '?film wdt:P31 wd:Q11424 . '
                        'OPTIONAL {{ ?film wdt:P577 ?date }} '
                        'OPTIONAL {{ ?film wdt:P136 ?genre }} '
                        'OPTIONAL {{ ?film wdt:P495 ?country }} '
                        'BIND(YEAR(?date) AS ?year) '
                        'SERVICE wikibase:label {{ bd:serviceParam wikibase:language "de,en" }} '
                        '}} LIMIT 20').format(qid=qid)
            r2 = _rq.get("https://query.wikidata.org/sparql",
                         params={"query": sparql_q, "format": "json"},
                         headers={"User-Agent": "SaveTV/1.0"}, timeout=8)
            bindings = r2.json().get("results", {}).get("bindings", [])
            if bindings:
                y, g, c = _parse_bindings(bindings)
                return {"year": y, "genres": [x for x in g if x not in BOGUS_GENRES], "countries": c}
    except Exception:
        pass

    return result
|
||
|
||
@app.route("/api/filminfo")
def api_filminfo():
    """Year/genre/country lookup for a single title, backed by the file cache."""
    title = request.args.get("title", "").strip()
    if not title:
        return jsonify({"error": "title missing"}), 400
    cache = _load_filminfo_cache()
    if title in cache:
        return jsonify(cache[title])
    # Cache miss: query Wikidata once and persist the answer.
    info = _wikidata_lookup(title)
    cache[title] = info
    _save_filminfo_cache(cache)
    return jsonify(info)
|
||
|
||
@app.route("/api/filminfo_batch", methods=["POST"])
def api_filminfo_batch():
    """Batch variant of /api/filminfo: JSON {"titles": [...]} -> {title: info}.

    Cached titles are answered from disk; the rest go through one Wikidata
    lookup each and the cache is written back once at the end.
    """
    # get_json() without silent=True returns None (or raises) for a missing
    # or malformed body; silent=True + `or {}` matches api_nas_synced.
    data = request.get_json(silent=True) or {}
    # `or []` also guards against an explicit {"titles": null}.
    titles = data.get("titles", []) or []
    cache = _load_filminfo_cache()
    results = {}
    missing = []
    for t in titles:
        if t in cache:
            results[t] = cache[t]
        else:
            missing.append(t)
    for t in missing:
        info = _wikidata_lookup(t)
        cache[t] = info
        results[t] = info
    if missing:
        _save_filminfo_cache(cache)
    return jsonify(results)
|
||
|
||
@app.route("/api/download_progress")
def api_download_progress():
    """Report per-download progress and finalize completed downloads.

    Reads .download_progress.json (tid -> {filename, expected_bytes, ...}),
    compares expected size with the on-disk size and with a running wget
    process. Completed downloads are renamed for Jellyfin, auto-deleted
    from the Save.TV archive (best effort) and removed from the progress
    file under the shared lock. Returns {tid: progress-info}.
    """
    import json as _json
    import subprocess as _sp
    progress_file = SAVETV_DIR / ".download_progress.json"
    if not progress_file.exists():
        return jsonify({})
    try:
        progress = _json.loads(progress_file.read_text())
    except Exception:
        # Unreadable progress file -> behave as if nothing is downloading.
        return jsonify({})

    dl_log_file = SAVETV_DIR / ".download_log.json"
    try:
        dl_log = _json.loads(dl_log_file.read_text()) if dl_log_file.exists() else {}
    except Exception:
        dl_log = {}

    # Clean up stale "running" entries: marked running in the log but with no
    # progress entry and no wget process -> the download failed, drop them.
    stale = []
    for tid, status in list(dl_log.items()):
        if status != "running":
            continue
        if tid in progress:
            continue
        # Check whether a wget for this TID is still alive.
        try:
            chk = _sp.run(["pgrep", "-af", f"_{tid}.mp4"],
                          capture_output=True, text=True, timeout=3)
            if "wget" in chk.stdout:
                continue
        except Exception:
            pass
        stale.append(tid)
    if stale:
        for tid in stale:
            dl_log.pop(tid, None)
        dl_log_file.write_text(_json.dumps(dl_log, ensure_ascii=False, indent=2))

    result = {}
    completed = []
    for tid, info in list(progress.items()):
        fp = SAVETV_DIR / info["filename"]
        current = fp.stat().st_size if fp.exists() else 0
        expected = info.get("expected_bytes", 0)

        # Detect a still-running wget for this filename.
        wget_running = False
        try:
            ps = _sp.run(["pgrep", "-af", info["filename"]],
                         capture_output=True, text=True, timeout=3)
            wget_running = "wget" in ps.stdout
        except Exception:
            pass

        # Done when the expected size is reached, or when wget exited and
        # the file is non-trivially sized (no reliable expected size).
        done = False
        if expected > 0 and current >= expected:
            done = True
        elif not wget_running and current > 100_000:
            done = True

        percent = round(current / expected * 100, 1) if expected > 0 else 0
        result[tid] = {
            "current_bytes": current,
            "expected_bytes": expected,
            "percent": min(percent, 100),
            "current_mb": round(current / 1024 / 1024, 1),
            "expected_mb": round(expected / 1024 / 1024, 1),
            "done": done,
        }

        if done:
            completed.append(tid)

    if completed:
        for tid in completed:
            info = progress.get(tid, {})
            raw_filename = info.get("filename", "")
            # Rename: "Titel_ID.mp4" -> "Titel (Jahr).mp4"
            if raw_filename:
                _rename_to_jellyfin(raw_filename, tid)
            # Auto-delete from the Save.TV archive (best effort).
            try:
                import sys as _sys
                _sys.path.insert(0, '/opt/homelab-ai-bot')
                from tools import savetv as _savetv
                ok, err = _savetv._delete_telecast(int(tid))
                if ok:
                    import logging as _log
                    _log.getLogger("savetv").info("Archiv-Eintrag %s nach Download gelöscht", tid)
            except Exception as _e:
                import logging as _log
                _log.getLogger("savetv").warning("Archiv-Delete TID %s fehlgeschlagen: %s", tid, _e)
            dl_log[tid] = "done"
        # Remove finished entries from the progress file under the lock.
        with _plock:
            cur = _load_prog()
            for tid in completed:
                cur.pop(tid, None)
            _save_prog(cur)
        dl_log_file.write_text(_json.dumps(dl_log, ensure_ascii=False, indent=2))

    return jsonify(result)
|
||
|
||
def _find_cache_match(cache, clean_title):
|
||
"""Sucht den besten Cache-Eintrag: exakt, dann normalisiert (Sonderzeichen-tolerant)."""
|
||
if clean_title in cache and cache[clean_title].get("year"):
|
||
return cache[clean_title]
|
||
import re as _re2
|
||
def _norm(s):
|
||
return _re2.sub(r'\s+', ' ', _re2.sub(r'[^\w\s]', ' ', s)).strip().lower()
|
||
norm = _norm(clean_title)
|
||
for key, val in cache.items():
|
||
if not val.get("year"):
|
||
continue
|
||
if _norm(key) == norm:
|
||
return val
|
||
return None
|
||
|
||
def _rename_to_jellyfin(raw_filename, tid):
    """Rename a finished download from 'Titel_ID.mp4' to 'Titel (Jahr).mp4'."""
    import re as _re
    src = SAVETV_DIR / raw_filename
    if not src.exists():
        return

    # Downloads are named "Der_Clou_12345678.mp4" (6-9 digit telecast id).
    m = _re.match(r'^(.+)_(\d{6,9})\.mp4$', raw_filename)
    if not m:
        return
    raw_title_part = m.group(1)

    # Underscores encode spaces; "_-_" encodes " - " (subtitle separator).
    clean_title = raw_title_part.replace('_-_', ' - ').replace('_', ' ').strip()

    # Get the release year: cached entry first, Wikidata on a full miss.
    cache = _load_filminfo_cache()
    matched = _find_cache_match(cache, clean_title)
    if matched:
        year = matched.get("year", "")
    else:
        if clean_title not in cache:
            cache[clean_title] = _wikidata_lookup(clean_title)
            _save_filminfo_cache(cache)
        year = cache[clean_title].get("year", "")

    # Build the target filename (strip characters invalid on common filesystems).
    safe_title = _re.sub(r'[\\/:*?"<>|]', '', clean_title).strip()
    if year:
        dest_name = f"{safe_title} ({year}).mp4"
    else:
        dest_name = f"{safe_title}.mp4"

    dest = SAVETV_DIR / dest_name

    # Never overwrite an existing target.
    if dest.exists():
        # Drop the now-redundant raw file instead.
        try:
            src.unlink()
        except Exception:
            pass
        return

    try:
        src.rename(dest)
        import logging
        logging.getLogger("savetv").info(f"Umbenannt: {raw_filename} -> {dest_name}")
    except Exception as e:
        import logging
        logging.getLogger("savetv").warning(f"Rename fehlgeschlagen {raw_filename}: {e}")
|
||
|
||
@app.route("/health")
def health():
    """Run live system checks and return {healthy, ok, total, checks}.

    Each check contributes {"ok": bool, "detail": str}; "healthy" is True
    only when every check passed.
    """
    from tools import savetv
    checks = {}
    ok_count = 0
    total = 0
    # --- Save.TV login/session ---
    total += 1
    try:
        s = savetv._get_session()
        checks["savetv_login"] = {"ok": s is not None, "detail": "Session aktiv" if s else "Login fehlgeschlagen"}
        if s: ok_count += 1
    except Exception as e:
        checks["savetv_login"] = {"ok": False, "detail": str(e)}
    # --- Save.TV archive access (timed) ---
    total += 1
    try:
        t0 = _time.time()
        entries = savetv._get_archive(count=5)
        dur = round(_time.time() - t0, 2)
        checks["savetv_archive"] = {"ok": len(entries) > 0, "detail": f"{len(entries)} Eintraege in {dur}s"}
        if entries: ok_count += 1
    except Exception as e:
        checks["savetv_archive"] = {"ok": False, "detail": str(e)}
    # --- Storage: require more than 10 GB free ---
    total += 1
    try:
        usage = shutil.disk_usage("/mnt/savetv")
        free_gb = round(usage.free / 1024**3, 1)
        mp4s = list(SAVETV_DIR.glob("*.mp4"))
        total_size = round(sum(f.stat().st_size for f in mp4s) / 1024**3, 2)
        checks["storage"] = {"ok": free_gb > 10, "detail": f"{len(mp4s)} Filme, {total_size} GB belegt, {free_gb} GB frei"}
        if free_gb > 10: ok_count += 1
    except Exception as e:
        checks["storage"] = {"ok": False, "detail": str(e)}
    # --- Wikidata reachability (known-film probe) ---
    total += 1
    try:
        import requests as _rq
        t0 = _time.time()
        sparql = 'SELECT ?year WHERE { ?f wdt:P31 wd:Q11424 . ?f rdfs:label "The Revenant"@en . ?f wdt:P577 ?date . BIND(YEAR(?date) AS ?year) } LIMIT 1'
        wr = _rq.get("https://query.wikidata.org/sparql", params={"query": sparql, "format": "json"}, headers={"User-Agent": "SaveTV/1.0"}, timeout=10)
        bindings = wr.json().get("results", {}).get("bindings", [])
        year = bindings[0]["year"]["value"] if bindings else ""
        dur = round(_time.time() - t0, 2)
        checks["wikidata"] = {"ok": bool(year), "detail": f"The Revenant -> {year} ({dur}s)"}
        if year: ok_count += 1
    except Exception as e:
        checks["wikidata"] = {"ok": False, "detail": str(e)}
    # --- Local nginx proxy ---
    total += 1
    try:
        import requests as _rq
        r = _rq.get("http://localhost:8766/", timeout=3)
        checks["nginx"] = {"ok": r.status_code == 200, "detail": f"Port 8766 -> {r.status_code}"}
        if r.status_code == 200: ok_count += 1
    except Exception as e:
        checks["nginx"] = {"ok": False, "detail": str(e)}
    # --- Cloudflare tunnel service ---
    total += 1
    try:
        import subprocess as _sp
        result = _sp.run(["systemctl", "is-active", "cloudflared"], capture_output=True, text=True, timeout=5)
        active = result.stdout.strip() == "active"
        checks["cloudflare_tunnel"] = {"ok": active, "detail": result.stdout.strip()}
        if active: ok_count += 1
    except Exception as e:
        checks["cloudflare_tunnel"] = {"ok": False, "detail": str(e)}
    # --- Flask itself: trivially ok since this handler is running ---
    total += 1
    checks["flask_web"] = {"ok": True, "detail": "Port 8765 aktiv"}
    ok_count += 1
    return jsonify({"healthy": ok_count == total, "ok": ok_count, "total": total, "checks": checks})
|
||
|
||
@app.route("/status")
def status_page():
    """Render the system-status page; inline JS fetches /health on load."""
    # Shared navigation bar (inline styles, no CSS classes).
    nav = '<div style="display:flex;gap:16px;margin-bottom:24px"><a href="/" style="color:#999;text-decoration:none;font-size:15px">← Archiv</a><a href="/downloads" style="color:#4caf92;text-decoration:none;font-size:15px">📁 Downloads</a><a href="/status" style="color:#5a7fa8;text-decoration:none;font-size:15px">⚙️ Status</a></div>'
    return '''<!DOCTYPE html>
<html lang="de"><head><meta charset="UTF-8"><title>Save.TV Status</title>
<style>
*{box-sizing:border-box}
body{background:#111118;color:#e8e8f0;font-family:system-ui,-apple-system,sans-serif;margin:0;padding:32px 40px;font-size:16px;line-height:1.6}
h1{font-size:28px;margin-bottom:4px;font-weight:700}
.sub{color:#777;font-size:15px;margin-bottom:32px}
.checks{max-width:800px}
.check{display:flex;align-items:center;gap:16px;padding:18px 20px;border-bottom:1px solid #2a2a3a;transition:background .15s}
.check:hover{background:#1a1a24}
.icon{font-size:24px;width:36px;text-align:center}
.name{font-weight:600;font-size:17px;min-width:220px}
.detail{color:#8888a8;font-size:14px}
.summary{margin:28px 0;padding:20px 24px;border-radius:8px;font-size:18px;font-weight:600}
.summary.ok{background:#122a1a;border:1px solid #4caf92;color:#4caf92}
.summary.warn{background:#2a1a0a;border:1px solid #ffa726;color:#ffa726}
.summary.fail{background:#2a0d0d;border:1px solid #ff3d3d;color:#ff3d3d}
.loading{color:#777;font-size:17px;padding:40px 0}
.features{max-width:800px;margin-top:40px}
.features h2{font-size:20px;margin-bottom:16px;font-weight:700}
.feat{display:flex;align-items:center;gap:12px;padding:10px 0;border-bottom:1px solid #1e1e2e;font-size:15px}
.feat .dot{width:8px;height:8px;border-radius:50%;background:#4caf92}
.feat .label{color:#ccc}
.feat .ver{color:#666;margin-left:auto;font-size:13px}
</style>
</head><body>
''' + nav + '''
<h1>⚙️ System Status</h1>
<div class="sub">Save.TV Download-System — Live-Checks</div>
<div id="result" class="loading">Prüfe Systeme...</div>
<div class="checks" id="checks"></div>
<div class="features">
<h2>📝 Funktionsübersicht</h2>
<div class="feat"><div class="dot"></div><div class="label">Save.TV EPG-Scanner (täglich 14:00, Auto-Aufnahme)</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Archiv-Bewertung: Kino-Highlights vs. TV-Filme</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Film-Download auf Hetzner (HD werbefrei)</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Web-Dashboard: Archiv durchsuchen + Download starten</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Jellyfin-Naming: Film (Jahr).mp4 via Wikidata</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Download-Seite mit direkten HTTP-Downloads</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Löschfunktion (Web-UI)</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Cloudflare Tunnel (savetv.orbitalo.net)</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Direkter Download (138.201.84.95:9443, Basic Auth)</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">nginx Reverse Proxy + Static File Serving</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Telegram Bot Integration (Aufnahme, Status, Tipps)</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Status-Seite + Health-Endpoint</div><div class="ver">20.03.2026</div></div>
</div>
<script>
fetch("/health").then(r=>r.json()).then(d=>{
var s=document.getElementById("result");
if(d.healthy){s.className="summary ok";s.textContent="\\u2705 Alles OK \\u2014 "+d.ok+"/"+d.total+" Checks bestanden"}
else if(d.ok>d.total/2){s.className="summary warn";s.textContent="\\u26a0\\ufe0f "+d.ok+"/"+d.total+" Checks bestanden"}
else{s.className="summary fail";s.textContent="\\u274c "+d.ok+"/"+d.total+" Checks bestanden"}
var c=document.getElementById("checks");
var html="";
var order=["savetv_login","savetv_archive","storage","wikidata","nginx","cloudflare_tunnel","flask_web"];
var names={"savetv_login":"Save.TV Login","savetv_archive":"Archiv-Zugriff","storage":"Speicher","wikidata":"Wikidata Lookup","nginx":"nginx Proxy","cloudflare_tunnel":"Cloudflare Tunnel","flask_web":"Flask Web-UI"};
order.forEach(function(k){
var v=d.checks[k];if(!v)return;
html+="<div class=\\"check\\"><div class=\\"icon\\">"+(v.ok?"\\u2705":"\\u274c")+"</div><div class=\\"name\\">"+names[k]+"</div><div class=\\"detail\\">"+v.detail+"</div></div>";
});
c.innerHTML=html;
});
</script></body></html>'''
|