feat(savetv): add savetv_extra_routes.py to repo — sortable downloads page
Downloads page now has sort buttons (date/name/size) with toggle asc/desc. Default: newest first. Client-side JS sorting, no page reload needed. Previously this file was only in /opt/ outside git.
This commit is contained in:
parent
6e8a16004c
commit
902441bbbc
1 changed files with 556 additions and 0 deletions
556
homelab-ai-bot/savetv_extra_routes.py
Normal file
556
homelab-ai-bot/savetv_extra_routes.py
Normal file
|
|
@ -0,0 +1,556 @@
|
|||
"""Extra Routes fuer savetv_web.py - nicht im Git, lokal in CT 116."""
|
||||
|
||||
import html as _html
|
||||
import shutil
|
||||
import time as _time
|
||||
from pathlib import Path
|
||||
from urllib.parse import quote as _urlquote
|
||||
from flask import send_from_directory, request, jsonify
|
||||
|
||||
SAVETV_DIR = Path("/mnt/savetv")
|
||||
|
||||
|
||||
def register_extra_routes(app, progress_lock=None, load_progress_raw=None, save_progress_raw=None):
    """Register the extra Save.TV routes on the given Flask application.

    Parameters:
        app: Flask application the routes are attached to.
        progress_lock: optional lock guarding read-modify-write of the
            download-progress state; a private threading.Lock is created
            when omitted.
        load_progress_raw: optional callable returning the progress dict;
            falls back to reading ``.download_progress.json`` in SAVETV_DIR.
        save_progress_raw: optional callable persisting the progress dict;
            falls back to writing the same JSON file.
    """
    import threading as _threading
    # Fall back to a local lock so progress updates stay serialized even
    # when the caller does not supply a shared lock.
    _plock = progress_lock if progress_lock is not None else _threading.Lock()
|
||||
|
||||
def _load_prog():
    """Read the current download-progress mapping.

    Uses the injected loader when one was provided; otherwise parses the
    JSON progress file directly, returning an empty dict when it is absent.
    """
    if load_progress_raw is None:
        import json as _j
        progress_path = SAVETV_DIR / ".download_progress.json"
        if not progress_path.exists():
            return {}
        return _j.loads(progress_path.read_text())
    return load_progress_raw()
|
||||
|
||||
def _save_prog(prog):
    """Persist the download-progress mapping *prog*.

    Delegates to the injected saver when available, else serializes the
    mapping as human-readable JSON next to the downloads.
    """
    if save_progress_raw is None:
        import json as _j
        target = SAVETV_DIR / ".download_progress.json"
        target.write_text(_j.dumps(prog, ensure_ascii=False, indent=2))
    else:
        save_progress_raw(prog)
|
||||
|
||||
@app.route("/files/<path:filename>")
def serve_file(filename):
    """Serve a stored file from SAVETV_DIR as a download attachment.

    ``send_from_directory`` rejects paths that escape SAVETV_DIR, so no
    extra traversal check is needed here.
    """
    return send_from_directory(str(SAVETV_DIR), filename, as_attachment=True)
|
||||
|
||||
@app.route("/api/delete", methods=["POST"])
def api_delete():
    """Delete one file from SAVETV_DIR.

    Expects a JSON body ``{"filename": "..."}``. Rejects empty names and
    anything containing path separators or ``..``. Returns a JSON object
    ``{"ok": bool, ...}`` with an appropriate HTTP status.
    """
    # Bug fix: get_json() returns None (or aborts) on a missing/invalid
    # JSON body, which made data.get(...) raise AttributeError. silent=True
    # plus the `or {}` fallback turns that into a clean 400 below.
    data = request.get_json(silent=True) or {}
    filename = data.get("filename", "")
    # Only plain file names directly inside SAVETV_DIR are deletable.
    if not filename or ".." in filename or "/" in filename:
        return jsonify({"ok": False, "error": "Ungueltig"}), 400
    target = SAVETV_DIR / filename
    if not target.exists():
        return jsonify({"ok": False, "error": "Nicht gefunden"}), 404
    try:
        target.unlink()
        return jsonify({"ok": True, "deleted": filename})
    except Exception as e:
        # Surface the OS error (e.g. permission denied) to the client.
        return jsonify({"ok": False, "error": str(e)}), 500
|
||||
|
||||
@app.route("/downloads")
def downloads_page():
    """Render the downloads overview: a sortable table of finished .mp4 files.

    Sorting (date/name/size with asc/desc toggle) happens client-side in the
    embedded JS; the server renders the rows newest-first by default.
    """
    # Collect (name, size in MB, mtime) for every .mp4 directly in SAVETV_DIR.
    files = []
    for fp in SAVETV_DIR.iterdir():
        if fp.suffix == ".mp4":
            st = fp.stat()
            size_mb = round(st.st_size / 1024 / 1024, 1)
            mtime = st.st_mtime
            files.append((fp.name, size_mb, mtime))
    # Default order: newest first (matches the active "Datum" sort button).
    files.sort(key=lambda x: x[2], reverse=True)
    total_gb = round(sum(s for _, s, _ in files) / 1024, 2)
    nav = '<div style="display:flex;gap:16px;margin-bottom:24px"><a href="/" style="color:#999;text-decoration:none;font-size:15px">← Archiv</a><a href="/downloads" style="color:#4caf92;text-decoration:none;font-size:15px">📁 Downloads</a><a href="/status" style="color:#5a7fa8;text-decoration:none;font-size:15px">⚙️ Status</a></div>'
    rows = ""
    from datetime import datetime as _dt
    for name, size, mtime in files:
        clean = name.rsplit(".", 1)[0]  # display title: filename without extension
        esc = _html.escape(name, quote=True)
        date_str = _dt.fromtimestamp(mtime).strftime("%d.%m.%Y")
        # The data-* attributes feed the client-side sortBy() below.
        # NOTE(review): `clean` is interpolated unescaped into the first cell;
        # a filename containing "<" or "&" would inject/break markup. Looks
        # like filenames are locally generated — confirm they are trusted.
        rows += (
            '<tr data-name="' + _html.escape(clean.lower(), quote=True) + '" '
            'data-date="' + str(int(mtime)) + '" '
            'data-size="' + str(size) + '">'
            '<td style="padding:16px 20px;font-size:18px;font-weight:500">' + clean + '</td>'
            '<td style="padding:16px 20px;color:#666;font-size:14px;white-space:nowrap">' + date_str + '</td>'
            '<td style="padding:16px 20px;color:#888;font-size:15px;white-space:nowrap">' + str(size) + ' MB</td>'
            '<td style="padding:16px 20px;white-space:nowrap">'
            '<a href="/files/' + _urlquote(name) + '" download="' + esc + '" '
            'style="background:#4caf92;color:#000;font-weight:700;padding:8px 20px;border-radius:5px;text-decoration:none;font-size:15px">'
            '⬇ Download</a> '
            '<button data-file="' + esc + '" onclick="delFile(this)" '
            'style="background:#ff3d3d;color:#fff;font-weight:700;padding:8px 16px;border-radius:5px;border:none;cursor:pointer;font-size:15px;margin-left:10px">'
            '🗑 L\u00f6schen</button>'
            '</td></tr>'
        )
    # One self-contained HTML document: styles, nav, sort bar, table and the
    # JS that re-orders <tr> elements in place (no reload needed).
    return (
        '<!DOCTYPE html><html lang="de"><head><meta charset="UTF-8"><title>Downloads</title>'
        '<style>'
        '*{box-sizing:border-box}'
        'body{background:#111118;color:#e8e8f0;font-family:system-ui,-apple-system,sans-serif;margin:0;padding:32px 40px;font-size:16px;line-height:1.5}'
        'h1{font-size:28px;margin-bottom:8px;font-weight:700;letter-spacing:-0.5px}'
        '.sub{color:#777;font-size:15px;margin-bottom:16px}'
        '.sortbar{display:flex;align-items:center;gap:8px;margin-bottom:24px;flex-wrap:wrap}'
        '.sortbar span{color:#666;font-size:14px;margin-right:4px}'
        '.sbtn{background:#1e1e2e;color:#999;border:1px solid #2a2a3a;padding:7px 16px;border-radius:6px;cursor:pointer;font-size:14px;font-weight:600;transition:all .15s}'
        '.sbtn:hover{background:#252538;color:#ccc;border-color:#3a3a5a}'
        '.sbtn.active{background:#1a2a3a;color:#4caf92;border-color:#4caf92}'
        '.sbtn .arrow{margin-left:5px;font-size:11px}'
        'table{border-collapse:collapse;width:100%;max-width:1100px}'
        'tr:hover{background:#1a1a24}'
        'tr{border-bottom:1px solid #2a2a3a}'
        '</style></head><body>'
        + nav +
        '<h1>📁 Gespeicherte Filme</h1>'
        '<div class="sub">' + str(len(files)) + ' Dateien · ' + str(total_gb) + ' GB</div>'
        '<div class="sortbar"><span>Sortieren:</span>'
        '<button class="sbtn active" data-sort="date" data-dir="desc" onclick="sortBy(this)">Datum <span class="arrow">▼</span></button>'
        '<button class="sbtn" data-sort="name" data-dir="asc" onclick="sortBy(this)">Name <span class="arrow">▲</span></button>'
        '<button class="sbtn" data-sort="size" data-dir="desc" onclick="sortBy(this)">Größe <span class="arrow">▼</span></button>'
        '</div>'
        '<table id="ftable"><tbody id="fbody">' + rows + '</tbody></table>'
        '<script>'
        'function sortBy(btn){'
        ' var key=btn.getAttribute("data-sort");'
        ' var dir=btn.getAttribute("data-dir");'
        ' var prev=document.querySelector(".sbtn.active");'
        ' if(prev&&prev===btn){dir=dir==="asc"?"desc":"asc";btn.setAttribute("data-dir",dir)}'
        ' else{if(prev)prev.classList.remove("active");btn.classList.add("active")}'
        ' btn.querySelector(".arrow").innerHTML=dir==="asc"?"▲":"▼";'
        ' var tb=document.getElementById("fbody");'
        ' var rows=Array.from(tb.querySelectorAll("tr"));'
        ' rows.sort(function(a,b){'
        ' var av,bv;'
        ' if(key==="date"){av=parseInt(a.getAttribute("data-date"));bv=parseInt(b.getAttribute("data-date"))}'
        ' else if(key==="size"){av=parseFloat(a.getAttribute("data-size"));bv=parseFloat(b.getAttribute("data-size"))}'
        ' else{av=a.getAttribute("data-name");bv=b.getAttribute("data-name");return dir==="asc"?av.localeCompare(bv):bv.localeCompare(av)}'
        ' return dir==="asc"?av-bv:bv-av'
        ' });'
        ' rows.forEach(function(r){tb.appendChild(r)})'
        '}'
        'function delFile(btn){var n=btn.getAttribute("data-file");if(!confirm("Wirklich loeschen? "+n))return;fetch("/api/delete",{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({filename:n})}).then(function(r){return r.json()}).then(function(d){if(d.ok){btn.closest("tr").remove()}else{alert(d.error)}})}'
        '</script></body></html>'
    )
|
||||
|
||||
|
||||
# Persistent cache for Wikidata film lookups: title -> {year, genres, countries}.
FILMINFO_CACHE = Path("/mnt/savetv/.filminfo_cache.json")
# Wikidata "genres" that are technical film classifications rather than real
# genres; these are filtered out of lookup results.
BOGUS_GENRES = {"Stummfilm", "Tonfilm", "Farbfilm", "Schwarzweissfilm",
                "Langfilm", "Kurzfilm", "Independentfilm"}
|
||||
|
||||
def _load_filminfo_cache():
    """Return the film-info cache dict; empty when the file is missing or corrupt."""
    import json as _json
    if not FILMINFO_CACHE.exists():
        return {}
    try:
        raw = FILMINFO_CACHE.read_text()
        return _json.loads(raw)
    except Exception:
        # A damaged cache file degrades to an empty cache instead of failing.
        return {}
|
||||
|
||||
def _save_filminfo_cache(cache):
    """Write the film-info cache to disk as human-readable JSON."""
    import json as _json
    payload = _json.dumps(cache, ensure_ascii=False, indent=1)
    FILMINFO_CACHE.write_text(payload)
|
||||
|
||||
def _wikidata_lookup(title):
    """Lookup year/genre/country for a film title via Wikidata.

    Strategy: first try an exact rdfs:label match (German, then English);
    if that yields nothing, fall back to the wbsearchentities API and query
    each candidate QID. Returns {"year": str, "genres": list, "countries":
    list}; all fields empty when nothing was found or the network failed.
    """
    import requests as _rq
    import re

    # Strip a subtitle after a dash ("Titel - Untertitel" -> "Titel") since
    # Wikidata labels usually carry only the main title.
    search_title = re.sub(r"\s*[-\u2013\u2014]\s*.+$", "", title).strip()
    result = {"year": "", "genres": [], "countries": []}

    def _parse_bindings(bindings):
        # Fold SPARQL result rows into (first year, up to 3 genres, up to 2 countries).
        year = ""
        genres = set()
        countries = set()
        for b in bindings:
            if not year and b.get("year", {}).get("value"):
                year = b["year"]["value"]
            if b.get("genreLabel", {}).get("value"):
                genres.add(b["genreLabel"]["value"])
            if b.get("countryLabel", {}).get("value"):
                countries.add(b["countryLabel"]["value"])
        return year, sorted(genres)[:3], sorted(countries)[:2]

    # Stage 1: exact label match per language. Doubled braces survive
    # str.format as literal SPARQL braces.
    for lang in ["de", "en"]:
        sparql = ('SELECT ?year ?genreLabel ?countryLabel WHERE {{ '
                  '?film wdt:P31 wd:Q11424 . '
                  '?film rdfs:label "{t}"@{l} . '
                  'OPTIONAL {{ ?film wdt:P577 ?date }} '
                  'OPTIONAL {{ ?film wdt:P136 ?genre }} '
                  'OPTIONAL {{ ?film wdt:P495 ?country }} '
                  'BIND(YEAR(?date) AS ?year) '
                  'SERVICE wikibase:label {{ bd:serviceParam wikibase:language "de,en" }} '
                  '}} LIMIT 20').format(t=search_title.replace('"', '\\"'), l=lang)
        try:
            r = _rq.get("https://query.wikidata.org/sparql",
                        params={"query": sparql, "format": "json"},
                        headers={"User-Agent": "SaveTV/1.0"}, timeout=8)
            bindings = r.json().get("results", {}).get("bindings", [])
            if bindings:
                y, g, c = _parse_bindings(bindings)
                return {"year": y, "genres": [x for x in g if x not in BOGUS_GENRES], "countries": c}
        except Exception:
            # Network/parse errors: try the next language, then the fallback.
            pass

    # Fallback: Wikidata search (fuzzy title match), then query each hit's QID.
    try:
        sr = _rq.get("https://www.wikidata.org/w/api.php",
                     params={"action": "wbsearchentities", "search": search_title,
                             "language": "de", "type": "item", "limit": "3", "format": "json"},
                     headers={"User-Agent": "SaveTV/1.0"}, timeout=8)
        for item in sr.json().get("search", []):
            qid = item.get("id", "")
            sparql_q = ('SELECT ?year ?genreLabel ?countryLabel WHERE {{ '
                        'BIND(wd:{qid} AS ?film) '
                        '?film wdt:P31 wd:Q11424 . '
                        'OPTIONAL {{ ?film wdt:P577 ?date }} '
                        'OPTIONAL {{ ?film wdt:P136 ?genre }} '
                        'OPTIONAL {{ ?film wdt:P495 ?country }} '
                        'BIND(YEAR(?date) AS ?year) '
                        'SERVICE wikibase:label {{ bd:serviceParam wikibase:language "de,en" }} '
                        '}} LIMIT 20').format(qid=qid)
            r2 = _rq.get("https://query.wikidata.org/sparql",
                         params={"query": sparql_q, "format": "json"},
                         headers={"User-Agent": "SaveTV/1.0"}, timeout=8)
            bindings = r2.json().get("results", {}).get("bindings", [])
            if bindings:
                y, g, c = _parse_bindings(bindings)
                return {"year": y, "genres": [x for x in g if x not in BOGUS_GENRES], "countries": c}
    except Exception:
        # Best-effort lookup: any failure yields the empty result below.
        pass

    return result
|
||||
|
||||
@app.route("/api/filminfo")
def api_filminfo():
    """Return cached (or freshly looked-up) Wikidata info for one title.

    Query parameter: ``title``. Responds 400 when it is missing/blank.
    """
    title = request.args.get("title", "").strip()
    if not title:
        return jsonify({"error": "title missing"}), 400
    cache = _load_filminfo_cache()
    if title in cache:
        # Cache hit: answer without touching Wikidata.
        return jsonify(cache[title])
    # Cache miss: query Wikidata once and persist the answer for next time.
    info = _wikidata_lookup(title)
    cache[title] = info
    _save_filminfo_cache(cache)
    return jsonify(info)
|
||||
|
||||
@app.route("/api/filminfo_batch", methods=["POST"])
def api_filminfo_batch():
    """Batch variant of /api/filminfo.

    Body: ``{"titles": [...]}``. Returns ``{title: info}`` for every
    requested title, serving known titles from the cache and hitting
    Wikidata only for unknown ones. The cache file is rewritten once,
    and only when something new was fetched.
    """
    # Bug fix: get_json() returns None (or aborts) on a missing/invalid
    # JSON body, which made data.get(...) raise AttributeError. silent=True
    # plus the `or {}` fallback turns that into an empty request instead.
    data = request.get_json(silent=True) or {}
    titles = data.get("titles", [])
    cache = _load_filminfo_cache()
    results = {}
    missing = []
    for t in titles:
        if t in cache:
            results[t] = cache[t]
        else:
            missing.append(t)
    # One network lookup per unknown title.
    for t in missing:
        info = _wikidata_lookup(t)
        cache[t] = info
        results[t] = info
    if missing:
        _save_filminfo_cache(cache)
    return jsonify(results)
|
||||
|
||||
@app.route("/api/download_progress")
def api_download_progress():
    """Report per-download progress and finalize completed downloads.

    Returns {tid: {current_bytes, expected_bytes, percent, current_mb,
    expected_mb, done}}. As a side effect: prunes stale "running" log
    entries, renames finished files to Jellyfin naming, deletes the
    Save.TV archive entry, and removes completed tids from the progress
    state (under _plock).
    """
    import json as _json
    import subprocess as _sp
    progress_file = SAVETV_DIR / ".download_progress.json"
    if not progress_file.exists():
        return jsonify({})
    try:
        progress = _json.loads(progress_file.read_text())
    except Exception:
        # Unreadable progress file: report nothing rather than erroring.
        return jsonify({})

    dl_log_file = SAVETV_DIR / ".download_log.json"
    try:
        dl_log = _json.loads(dl_log_file.read_text()) if dl_log_file.exists() else {}
    except Exception:
        dl_log = {}

    # Prune stale "running" entries: logged as running, but no progress
    # entry and no wget process -> the download failed; drop the entry.
    stale = []
    for tid, status in list(dl_log.items()):
        if status != "running":
            continue
        if tid in progress:
            continue
        # Check whether a wget for this TID is still alive.
        try:
            chk = _sp.run(["pgrep", "-af", f"_{tid}.mp4"],
                          capture_output=True, text=True, timeout=3)
            if "wget" in chk.stdout:
                continue
        except Exception:
            pass
        stale.append(tid)
    if stale:
        for tid in stale:
            dl_log.pop(tid, None)
        dl_log_file.write_text(_json.dumps(dl_log, ensure_ascii=False, indent=2))

    result = {}
    completed = []
    for tid, info in list(progress.items()):
        fp = SAVETV_DIR / info["filename"]
        current = fp.stat().st_size if fp.exists() else 0
        expected = info.get("expected_bytes", 0)

        # Is a wget for this exact filename still running?
        wget_running = False
        try:
            ps = _sp.run(["pgrep", "-af", info["filename"]],
                         capture_output=True, text=True, timeout=3)
            wget_running = "wget" in ps.stdout
        except Exception:
            pass

        # Completion heuristic. NOTE(review): the second branch marks any
        # non-running download over 100 KB as done — a killed/partial wget
        # would be finalized too. Confirm this is the intended behavior.
        done = False
        if expected > 0 and current >= expected:
            done = True
        elif not wget_running and current > 100_000:
            done = True

        percent = round(current / expected * 100, 1) if expected > 0 else 0
        result[tid] = {
            "current_bytes": current,
            "expected_bytes": expected,
            "percent": min(percent, 100),
            "current_mb": round(current / 1024 / 1024, 1),
            "expected_mb": round(expected / 1024 / 1024, 1),
            "done": done,
        }

        if done:
            completed.append(tid)

    if completed:
        for tid in completed:
            info = progress.get(tid, {})
            raw_filename = info.get("filename", "")
            # Rename: "Titel_ID.mp4" -> "Titel (Jahr).mp4"
            if raw_filename:
                _rename_to_jellyfin(raw_filename, tid)
            # Auto-delete the entry from the Save.TV archive.
            try:
                import sys as _sys
                _sys.path.insert(0, '/opt/homelab-ai-bot')
                from tools import savetv as _savetv
                ok, err = _savetv._delete_telecast(int(tid))
                if ok:
                    import logging as _log
                    _log.getLogger("savetv").info("Archiv-Eintrag %s nach Download gelöscht", tid)
            except Exception as _e:
                import logging as _log
                _log.getLogger("savetv").warning("Archiv-Delete TID %s fehlgeschlagen: %s", tid, _e)
            dl_log[tid] = "done"
        # Remove completed tids from the shared progress state atomically.
        with _plock:
            cur = _load_prog()
            for tid in completed:
                cur.pop(tid, None)
            _save_prog(cur)
        dl_log_file.write_text(_json.dumps(dl_log, ensure_ascii=False, indent=2))

    return jsonify(result)
|
||||
|
||||
def _find_cache_match(cache, clean_title):
|
||||
"""Sucht den besten Cache-Eintrag: exakt, dann normalisiert (Sonderzeichen-tolerant)."""
|
||||
if clean_title in cache and cache[clean_title].get("year"):
|
||||
return cache[clean_title]
|
||||
import re as _re2
|
||||
def _norm(s):
|
||||
return _re2.sub(r'\s+', ' ', _re2.sub(r'[^\w\s]', ' ', s)).strip().lower()
|
||||
norm = _norm(clean_title)
|
||||
for key, val in cache.items():
|
||||
if not val.get("year"):
|
||||
continue
|
||||
if _norm(key) == norm:
|
||||
return val
|
||||
return None
|
||||
|
||||
def _rename_to_jellyfin(raw_filename, tid):
    """Rename a finished download from 'Titel_ID.mp4' to 'Titel (Jahr).mp4'.

    The year comes from the film-info cache (exact or normalized match),
    with a Wikidata lookup as fallback. No-op when the source file is
    missing or the name does not match the expected pattern. When the
    target already exists, the raw file is deleted instead of renamed.
    """
    import re as _re
    src = SAVETV_DIR / raw_filename
    if not src.exists():
        return

    # Expect "<title>_<6-9 digit telecast id>.mp4"; anything else is left alone.
    m = _re.match(r'^(.+)_(\d{6,9})\.mp4$', raw_filename)
    if not m:
        return
    raw_title_part = m.group(1)

    # Underscore-encoded title back to readable form ("Der_Film" -> "Der Film").
    clean_title = raw_title_part.replace('_-_', ' - ').replace('_', ' ').strip()

    cache = _load_filminfo_cache()
    matched = _find_cache_match(cache, clean_title)
    if matched:
        year = matched.get("year", "")
    else:
        # Not cached yet: look the title up once and persist the result.
        if clean_title not in cache:
            cache[clean_title] = _wikidata_lookup(clean_title)
            _save_filminfo_cache(cache)
        year = cache[clean_title].get("year", "")

    # Build the target file name, stripping characters invalid in file names.
    safe_title = _re.sub(r'[\\/:*?"<>|]', '', clean_title).strip()
    if year:
        dest_name = f"{safe_title} ({year}).mp4"
    else:
        dest_name = f"{safe_title}.mp4"

    dest = SAVETV_DIR / dest_name

    # Do not overwrite an existing target.
    if dest.exists():
        # Delete the old raw file instead (best effort).
        try:
            src.unlink()
        except Exception:
            pass
        return

    try:
        src.rename(dest)
        # Log the rename for traceability.
        import logging
        logging.getLogger("savetv").info(f"Umbenannt: {raw_filename} -> {dest_name}")
    except Exception as e:
        import logging
        logging.getLogger("savetv").warning(f"Rename fehlgeschlagen {raw_filename}: {e}")
|
||||
|
||||
@app.route("/health")
def health():
    """Run live checks over all subsystems and report them as JSON.

    Checks: Save.TV login, archive access, storage headroom, Wikidata
    reachability, the local nginx proxy, the cloudflared service, and the
    Flask process itself. Response: {"healthy": all-ok, "ok": n, "total": m,
    "checks": {...}} — each check carries {"ok": bool, "detail": str}.
    """
    from tools import savetv
    checks = {}
    ok_count = 0
    total = 0
    # Check 1: Save.TV session/login.
    total += 1
    try:
        s = savetv._get_session()
        checks["savetv_login"] = {"ok": s is not None, "detail": "Session aktiv" if s else "Login fehlgeschlagen"}
        if s: ok_count += 1
    except Exception as e:
        checks["savetv_login"] = {"ok": False, "detail": str(e)}
    # Check 2: archive listing works and how fast.
    total += 1
    try:
        t0 = _time.time()
        entries = savetv._get_archive(count=5)
        dur = round(_time.time() - t0, 2)
        checks["savetv_archive"] = {"ok": len(entries) > 0, "detail": f"{len(entries)} Eintraege in {dur}s"}
        if entries: ok_count += 1
    except Exception as e:
        checks["savetv_archive"] = {"ok": False, "detail": str(e)}
    # Check 3: storage — healthy means more than 10 GB free.
    total += 1
    try:
        usage = shutil.disk_usage("/mnt/savetv")
        free_gb = round(usage.free / 1024**3, 1)
        mp4s = list(SAVETV_DIR.glob("*.mp4"))
        total_size = round(sum(f.stat().st_size for f in mp4s) / 1024**3, 2)
        checks["storage"] = {"ok": free_gb > 10, "detail": f"{len(mp4s)} Filme, {total_size} GB belegt, {free_gb} GB frei"}
        if free_gb > 10: ok_count += 1
    except Exception as e:
        checks["storage"] = {"ok": False, "detail": str(e)}
    # Check 4: Wikidata SPARQL endpoint, probed with a known film.
    total += 1
    try:
        import requests as _rq
        t0 = _time.time()
        sparql = 'SELECT ?year WHERE { ?f wdt:P31 wd:Q11424 . ?f rdfs:label "The Revenant"@en . ?f wdt:P577 ?date . BIND(YEAR(?date) AS ?year) } LIMIT 1'
        wr = _rq.get("https://query.wikidata.org/sparql", params={"query": sparql, "format": "json"}, headers={"User-Agent": "SaveTV/1.0"}, timeout=10)
        bindings = wr.json().get("results", {}).get("bindings", [])
        year = bindings[0]["year"]["value"] if bindings else ""
        dur = round(_time.time() - t0, 2)
        checks["wikidata"] = {"ok": bool(year), "detail": f"The Revenant -> {year} ({dur}s)"}
        if year: ok_count += 1
    except Exception as e:
        checks["wikidata"] = {"ok": False, "detail": str(e)}
    # Check 5: local nginx reverse proxy answering on port 8766.
    total += 1
    try:
        import requests as _rq
        r = _rq.get("http://localhost:8766/", timeout=3)
        checks["nginx"] = {"ok": r.status_code == 200, "detail": f"Port 8766 -> {r.status_code}"}
        if r.status_code == 200: ok_count += 1
    except Exception as e:
        checks["nginx"] = {"ok": False, "detail": str(e)}
    # Check 6: cloudflared systemd unit active.
    total += 1
    try:
        import subprocess as _sp
        result = _sp.run(["systemctl", "is-active", "cloudflared"], capture_output=True, text=True, timeout=5)
        active = result.stdout.strip() == "active"
        checks["cloudflare_tunnel"] = {"ok": active, "detail": result.stdout.strip()}
        if active: ok_count += 1
    except Exception as e:
        checks["cloudflare_tunnel"] = {"ok": False, "detail": str(e)}
    # Check 7: trivially true — if this handler runs, Flask is serving.
    total += 1
    checks["flask_web"] = {"ok": True, "detail": "Port 8765 aktiv"}
    ok_count += 1
    return jsonify({"healthy": ok_count == total, "ok": ok_count, "total": total, "checks": checks})
|
||||
|
||||
@app.route("/status")
def status_page():
    """Render the status page: feature list plus live checks fetched from /health.

    The page itself is static HTML; the check results are loaded
    asynchronously by the embedded script via the /health endpoint.
    """
    nav = '<div style="display:flex;gap:16px;margin-bottom:24px"><a href="/" style="color:#999;text-decoration:none;font-size:15px">← Archiv</a><a href="/downloads" style="color:#4caf92;text-decoration:none;font-size:15px">📁 Downloads</a><a href="/status" style="color:#5a7fa8;text-decoration:none;font-size:15px">⚙️ Status</a></div>'
    return '''<!DOCTYPE html>
<html lang="de"><head><meta charset="UTF-8"><title>Save.TV Status</title>
<style>
*{box-sizing:border-box}
body{background:#111118;color:#e8e8f0;font-family:system-ui,-apple-system,sans-serif;margin:0;padding:32px 40px;font-size:16px;line-height:1.6}
h1{font-size:28px;margin-bottom:4px;font-weight:700}
.sub{color:#777;font-size:15px;margin-bottom:32px}
.checks{max-width:800px}
.check{display:flex;align-items:center;gap:16px;padding:18px 20px;border-bottom:1px solid #2a2a3a;transition:background .15s}
.check:hover{background:#1a1a24}
.icon{font-size:24px;width:36px;text-align:center}
.name{font-weight:600;font-size:17px;min-width:220px}
.detail{color:#8888a8;font-size:14px}
.summary{margin:28px 0;padding:20px 24px;border-radius:8px;font-size:18px;font-weight:600}
.summary.ok{background:#122a1a;border:1px solid #4caf92;color:#4caf92}
.summary.warn{background:#2a1a0a;border:1px solid #ffa726;color:#ffa726}
.summary.fail{background:#2a0d0d;border:1px solid #ff3d3d;color:#ff3d3d}
.loading{color:#777;font-size:17px;padding:40px 0}
.features{max-width:800px;margin-top:40px}
.features h2{font-size:20px;margin-bottom:16px;font-weight:700}
.feat{display:flex;align-items:center;gap:12px;padding:10px 0;border-bottom:1px solid #1e1e2e;font-size:15px}
.feat .dot{width:8px;height:8px;border-radius:50%;background:#4caf92}
.feat .label{color:#ccc}
.feat .ver{color:#666;margin-left:auto;font-size:13px}
</style>
</head><body>
''' + nav + '''
<h1>⚙️ System Status</h1>
<div class="sub">Save.TV Download-System — Live-Checks</div>
<div id="result" class="loading">Prüfe Systeme...</div>
<div class="checks" id="checks"></div>
<div class="features">
<h2>📝 Funktionsübersicht</h2>
<div class="feat"><div class="dot"></div><div class="label">Save.TV EPG-Scanner (täglich 14:00, Auto-Aufnahme)</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Archiv-Bewertung: Kino-Highlights vs. TV-Filme</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Film-Download auf Hetzner (HD werbefrei)</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Web-Dashboard: Archiv durchsuchen + Download starten</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Jellyfin-Naming: Film (Jahr).mp4 via Wikidata</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Download-Seite mit direkten HTTP-Downloads</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Löschfunktion (Web-UI)</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Cloudflare Tunnel (savetv.orbitalo.net)</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Direkter Download (138.201.84.95:9443, Basic Auth)</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">nginx Reverse Proxy + Static File Serving</div><div class="ver">20.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Telegram Bot Integration (Aufnahme, Status, Tipps)</div><div class="ver">17.03.2026</div></div>
<div class="feat"><div class="dot"></div><div class="label">Status-Seite + Health-Endpoint</div><div class="ver">20.03.2026</div></div>
</div>
<script>
fetch("/health").then(r=>r.json()).then(d=>{
var s=document.getElementById("result");
if(d.healthy){s.className="summary ok";s.textContent="\\u2705 Alles OK \\u2014 "+d.ok+"/"+d.total+" Checks bestanden"}
else if(d.ok>d.total/2){s.className="summary warn";s.textContent="\\u26a0\\ufe0f "+d.ok+"/"+d.total+" Checks bestanden"}
else{s.className="summary fail";s.textContent="\\u274c "+d.ok+"/"+d.total+" Checks bestanden"}
var c=document.getElementById("checks");
var html="";
var order=["savetv_login","savetv_archive","storage","wikidata","nginx","cloudflare_tunnel","flask_web"];
var names={"savetv_login":"Save.TV Login","savetv_archive":"Archiv-Zugriff","storage":"Speicher","wikidata":"Wikidata Lookup","nginx":"nginx Proxy","cloudflare_tunnel":"Cloudflare Tunnel","flask_web":"Flask Web-UI"};
order.forEach(function(k){
var v=d.checks[k];if(!v)return;
html+="<div class=\\"check\\"><div class=\\"icon\\">"+(v.ok?"\\u2705":"\\u274c")+"</div><div class=\\"name\\">"+names[k]+"</div><div class=\\"detail\\">"+v.detail+"</div></div>";
});
c.innerHTML=html;
});
</script></body></html>'''
|
||||
Loading…
Add table
Reference in a new issue