feat(savetv): Jellyfin-Filter, Archiv-Cache, Stabilitäts-Fixes
- savetv_web.py: Archiv-Cache mit Hintergrund-Refresh (kein Blockieren bei Save.TV-Login)
- savetv_web.py: Flask threaded=True für parallele Requests
- savetv_web.py: Jellyfin-Duplikate-Filter (Checkbox, default: ein)
- tools/savetv.py: Login-Timeout (8s connect, 20s read) + modernerer User-Agent
- savetv_sync.py: Dateien unter 700 MB werden übersprungen (kein SD-Schrott)
- CT 116: www.save.tv statisch in /etc/hosts → kein DNS-GIL-Block mehr
- CT 116: RAM von 512 MB auf 1 GB erhöht (war der Hauptgrund für Einfrieren)
This commit is contained in:
parent
29ffcc6bf0
commit
eb47bcb8d1
3 changed files with 182 additions and 3 deletions
118
homelab-ai-bot/savetv_sync.py
Normal file
118
homelab-ai-bot/savetv_sync.py
Normal file
|
|
@ -0,0 +1,118 @@
|
|||
#!/usr/bin/env python3
"""
SaveTV → NAS Sync

Copies finished recordings from the Hetzner box to the NAS. A file is
only copied once it is at least MIN_ALTER_H hours old (plus a random
jitter of up to JITTER_MIN minutes), so fresh downloads settle first.

NOTE(review): an earlier version of this header said "12h ± 0-30min";
the code below waits MIN_ALTER_H = 24 hours and the jitter is one-sided
(+0..30 min, see random.uniform in sync()) — confirm intended values.
"""
import os, json, time, random, urllib.request, urllib.parse, urllib.error, email.utils
from datetime import datetime

# Destination directory on the mounted NAS share.
ZIEL = "/mnt/nas/Filme zum nachbearbeiten"
# Base URL for downloading finished files from the Hetzner host.
BASE = "http://138.201.84.95:9443/files"
# JSON API listing the available downloads.
API = "http://138.201.84.95:9443/api/films"
# Endpoint notified after a file has landed on the NAS.
CALLBACK = "http://138.201.84.95:9443/api/nas_synced"
# Append-only log file for this sync script.
LOG = "/var/log/savetv_sync.log"

# Minimum file age (hours) before a file is copied to the NAS.
MIN_ALTER_H = 24
JITTER_MIN = 30  # up to 30 minutes of random extra waiting time
|
||||
|
||||
def log(msg):
    """Print *msg* with a timestamp and append it to the LOG file.

    Logging is best-effort: if the log file cannot be written (missing
    directory, read-only filesystem, disk full), the message still goes
    to stdout and the sync run is NOT aborted — previously any write
    failure crashed the whole script.
    """
    ts = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    line = f"[{ts}] {msg}"
    print(line, flush=True)
    try:
        # Explicit utf-8: film titles may contain non-ASCII characters,
        # and the locale default encoding is not guaranteed to cover them.
        with open(LOG, "a", encoding="utf-8") as f:
            f.write(line + "\n")
    except OSError:
        # stdout above already carries the message.
        pass
|
||||
|
||||
def get_filmliste():
    """Return the list of downloadable file names from the films API.

    Entries in the API response may be plain strings or dicts carrying a
    "name" key; falsy entries are dropped. On any error the problem is
    logged and an empty list is returned.
    """
    try:
        with urllib.request.urlopen(API, timeout=15) as resp:
            daten = json.loads(resp.read())
        namen = []
        for eintrag in daten.get("downloads", []):
            if not eintrag:
                continue
            namen.append(eintrag if isinstance(eintrag, str) else eintrag.get("name", ""))
        return namen
    except Exception as e:
        log(f"API-Fehler: {e}")
        return []
|
||||
|
||||
def get_file_age_hours(name):
    """Return the age (in hours) of *name* on the Hetzner host.

    Determined via a HEAD request and the Last-Modified response header;
    returns None when the header is absent or the request fails.
    """
    url = BASE + "/" + urllib.parse.quote(name)
    try:
        kopf = urllib.request.Request(url, method="HEAD")
        with urllib.request.urlopen(kopf, timeout=10) as antwort:
            last_mod = antwort.headers.get("Last-Modified")
        if not last_mod:
            return None
        geaendert = email.utils.parsedate_to_datetime(last_mod).timestamp()
        return (time.time() - geaendert) / 3600
    except Exception:
        return None
|
||||
|
||||
def _remote_size_mb(url):
    """Size of the remote file in MB via a HEAD request; 0 on any error."""
    try:
        req = urllib.request.Request(url, method="HEAD")
        with urllib.request.urlopen(req, timeout=10) as r:
            return int(r.headers.get("Content-Length", 0)) / 1024 / 1024
    except Exception:
        return 0


def _notify_synced(name):
    """Best-effort POST to CALLBACK announcing that *name* is on the NAS."""
    try:
        body = json.dumps({"name": name}).encode()
        req = urllib.request.Request(
            CALLBACK, data=body,
            headers={"Content-Type": "application/json"}, method="POST")
        urllib.request.urlopen(req, timeout=5)
    except Exception:
        pass


def _download(url, ziel):
    """Stream *url* to *ziel*.

    Downloads to "<ziel>.part" first and renames atomically on success,
    so an interrupted transfer (crash, kill) can never leave a partial
    file at the final path — previously a >700 MB partial passed the
    "already copied" size check and was never completed. The per-read
    timeout also prevents the hang that timeout-less urlretrieve allowed.
    """
    tmp = ziel + ".part"
    with urllib.request.urlopen(url, timeout=60) as quelle, open(tmp, "wb") as out:
        while True:
            chunk = quelle.read(1024 * 1024)
            if not chunk:
                break
            out.write(chunk)
    os.replace(tmp, ziel)


def sync():
    """One sync pass: copy every eligible recording to the NAS.

    A film is copied when it is large enough (>= 700 MB, i.e. not SD
    junk), old enough (MIN_ALTER_H hours + jitter), and not already
    present on the NAS. Each successful copy is reported to CALLBACK.
    """
    filme = get_filmliste()
    if not filme:
        log("Keine Filme in API")
        return

    log(f"{len(filme)} Filme in API")
    kopiert = 0

    for name in filme:
        if not name:
            continue

        ziel = os.path.join(ZIEL, name)

        # Already present and complete? (anything above the 700 MB floor
        # counts as complete; _download guarantees no partials land here)
        if os.path.exists(ziel) and os.path.getsize(ziel) > 700 * 1024 * 1024:
            continue

        url = BASE + "/" + urllib.parse.quote(name)

        # Skip SD-quality junk below 700 MB.
        size_mb = _remote_size_mb(url)
        if size_mb < 700:
            log(f"SKIP (zu klein, {size_mb:.0f} MB): {name}")
            continue

        # Check the file's age on the remote host.
        alter_h = get_file_age_hours(name)
        if alter_h is None:
            log(f"SKIP (kein Header): {name}")
            continue

        # Waiting time: MIN_ALTER_H hours plus random 0-JITTER_MIN minutes.
        # (A previous comment said "12h"; MIN_ALTER_H is 24 — kept the code.)
        warte_h = MIN_ALTER_H + random.uniform(0, JITTER_MIN / 60)

        if alter_h < warte_h:
            rest_min = (warte_h - alter_h) * 60
            log(f"WARTE noch {rest_min:.0f} min: {name} (Alter: {alter_h:.1f}h)")
            continue

        log(f"LADE ({alter_h:.1f}h alt): {name}")
        try:
            _download(url, ziel)
            size_mb = os.path.getsize(ziel) / 1024 / 1024
            log(f" OK ({size_mb:.0f} MB): {name}")
            kopiert += 1
            _notify_synced(name)
        except Exception as e:
            log(f" FEHLER: {e}: {name}")
            # Remove both the temp file and any half-materialized target.
            for rest in (ziel + ".part", ziel):
                if os.path.exists(rest):
                    os.remove(rest)

    log(f"Fertig. {kopiert} neue Filme kopiert.")
|
||||
|
||||
# Entry point: run one sync pass and exit (presumably invoked
# periodically by cron/systemd-timer — confirm against deployment).
if __name__ == "__main__":
    sync()
|
||||
|
|
@ -24,6 +24,55 @@ app = Flask(__name__)
|
|||
|
||||
# Persisted record of past downloads.
DOWNLOAD_LOG = Path("/mnt/savetv/.download_log.json")

# ── Archive cache (background refresh) ──────────────────────────────────────
# On-disk copy of the Save.TV archive listing, refreshed by a daemon
# thread so web requests never block on a Save.TV login.
_ARCHIVE_CACHE_FILE = Path("/mnt/savetv/.archive_cache.json")
_ARCHIVE_CACHE_TTL = 1800  # cache lifetime in seconds (30 min)
_archive_lock = threading.Lock()  # guards the _archive_refreshing flag
_archive_refreshing = False  # True while a background refresh is running
|
||||
|
||||
def _load_archive_cache():
    """Read the cached archive listing from disk (instant, no network).

    Returns (entries, timestamp); falls back to ([], 0.0) whenever the
    cache file is missing or cannot be parsed.
    """
    entries, stamp = [], 0.0
    try:
        if _ARCHIVE_CACHE_FILE.exists():
            cached = json.loads(_ARCHIVE_CACHE_FILE.read_text())
            entries = cached.get("entries", [])
            stamp = float(cached.get("ts", 0))
    except Exception:
        entries, stamp = [], 0.0
    return entries, stamp
|
||||
|
||||
def _save_archive_cache(entries):
    """Persist *entries* plus a freshness timestamp to the cache file.

    Writes to a sibling temp file first and renames it into place, so a
    crash mid-write can never leave a truncated/corrupt JSON behind —
    the plain write_text used before was not atomic. Best-effort: any
    failure is swallowed, the cache is only an optimization.
    """
    try:
        payload = json.dumps(
            {"ts": datetime.now().timestamp(), "entries": entries},
            ensure_ascii=False)
        tmp = _ARCHIVE_CACHE_FILE.with_name(_ARCHIVE_CACHE_FILE.name + ".tmp")
        tmp.write_text(payload)
        # Atomic on POSIX: readers see either the old or the new cache.
        tmp.replace(_ARCHIVE_CACHE_FILE)
    except Exception:
        pass
|
||||
|
||||
def _refresh_archive_bg():
    """Refresh the Save.TV archive cache in the background.

    Fetches the full archive via savetv._get_full_archive() and stores
    it in the file cache. The _archive_refreshing flag, checked and set
    under _archive_lock, ensures only one refresh runs at a time.
    """
    global _archive_refreshing
    with _archive_lock:
        # Another thread is already refreshing — bail out.
        if _archive_refreshing:
            return
        _archive_refreshing = True
    try:
        entries = savetv._get_full_archive()
        if entries:
            _save_archive_cache(entries)
    except Exception:
        # Best-effort: a failed refresh leaves the previous cache intact.
        pass
    finally:
        # NOTE(review): indentation reconstructed from a mangled diff
        # view — confirm the try/finally sits outside the with-block.
        # Resetting the flag without the lock looks intentional (a plain
        # bool store; worst case the next refresh starts slightly early).
        _archive_refreshing = False
|
||||
|
||||
def _get_archive_cached():
    """Return the cached archive immediately; never blocks the request.

    When the cache is stale (older than _ARCHIVE_CACHE_TTL) or empty, a
    daemon thread is started to refresh it in the background while the
    caller still gets the current (possibly stale/empty) entries.
    """
    entries, cached_at = _load_archive_cache()
    stale = (datetime.now().timestamp() - cached_at) > _ARCHIVE_CACHE_TTL
    if stale or not entries:
        threading.Thread(target=_refresh_archive_bg, daemon=True).start()
    return entries
|
||||
|
||||
|
||||
def _load_download_log():
|
||||
if DOWNLOAD_LOG.exists():
|
||||
|
|
@ -396,6 +445,10 @@ HTML = r"""<!DOCTYPE html>
|
|||
<button class="filter-btn" onclick="setFilter('kino')">🎬 Kino</button>
|
||||
<button class="filter-btn" onclick="setFilter('tv')">📺 TV-Film</button>
|
||||
<input class="search-input" id="search" placeholder="Suchen..." oninput="renderFilms()">
|
||||
<label id="jf-toggle-label" style="display:none;align-items:center;gap:6px;font-size:14px;color:var(--muted);cursor:pointer;margin-left:8px">
|
||||
<input type="checkbox" id="jf-hide-cb" checked onchange="hideJellyfin=this.checked;renderFilms()">
|
||||
Jellyfin-Duplikate ausblenden
|
||||
</label>
|
||||
</div>
|
||||
<div id="urgent-section" style="display:none">
|
||||
<div class="section-header">
|
||||
|
|
@ -440,6 +493,7 @@ let infoLoading = false;
|
|||
let dlProgress = {};
|
||||
let polling = false;
|
||||
let jellyfinSet = null;
|
||||
let hideJellyfin = true;
|
||||
|
||||
function esc(s) {
|
||||
var d = document.createElement('div');
|
||||
|
|
@ -503,6 +557,11 @@ async function loadFilms() {
|
|||
(data.stored_titles || []).forEach(function(t) { storedSet.add(t); });
|
||||
document.getElementById('loading').style.display = 'none';
|
||||
document.getElementById('content').style.display = 'block';
|
||||
if (!allFilms.length) {
|
||||
document.getElementById('header-stats').textContent = 'Archiv wird im Hintergrund geladen... Seite in 15s neu laden.';
|
||||
setTimeout(function(){ location.reload(); }, 15000);
|
||||
return;
|
||||
}
|
||||
var storedCount = 0;
|
||||
allFilms.forEach(function(f) { if (storedSet.has(normTitle(f.title))) storedCount++; });
|
||||
var statsLine = data.total + ' Aufnahmen \u00b7 ' + data.kino + ' Kino \u00b7 ' + data.urgent + ' dringend \u00b7 ' + storedCount + ' gespeichert';
|
||||
|
|
@ -512,6 +571,7 @@ async function loadFilms() {
|
|||
if (jdata.ok && Array.isArray(jdata.normalized_titles)) {
|
||||
jellyfinSet = new Set(jdata.normalized_titles);
|
||||
statsLine += ' \u00b7 Jellyfin ' + (jdata.count || 0) + ' Filme';
|
||||
document.getElementById('jf-toggle-label').style.display = 'flex';
|
||||
}
|
||||
} catch (e) {}
|
||||
document.getElementById('header-stats').textContent = statsLine;
|
||||
|
|
@ -532,6 +592,7 @@ function renderFilms() {
|
|||
var q = document.getElementById('search').value.toLowerCase();
|
||||
var films = allFilms.filter(function(f) {
|
||||
if (q && f.title.toLowerCase().indexOf(q) === -1) return false;
|
||||
if (hideJellyfin && jellyfinSet && jellyfinSet.has(normTitle(f.title))) return false;
|
||||
if (currentFilter === 'urgent') return f.days_left <= 7;
|
||||
if (currentFilter === 'kino') return f.cinema && f.days_left > 7;
|
||||
if (currentFilter === 'tv') return !f.cinema;
|
||||
|
|
@ -874,4 +935,4 @@ except ImportError:
|
|||
pass
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(host="0.0.0.0", port=8765, debug=False)
|
||||
app.run(host="0.0.0.0", port=8765, debug=False, threaded=True)
|
||||
|
|
|
|||
|
|
@ -158,14 +158,14 @@ def _get_session():
|
|||
return _session
|
||||
|
||||
s = requests.Session()
|
||||
s.headers.update({"User-Agent": "Mozilla/5.0 Hausmeister-Bot/1.0"})
|
||||
s.headers.update({"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"})
|
||||
|
||||
try:
|
||||
s.post(
|
||||
SAVETV_URL + "/STV/M/Index.cfm?sk=PREMIUM",
|
||||
data={"sUsername": SAVETV_USER, "sPassword": SAVETV_PASS, "value": "Login"},
|
||||
allow_redirects=True,
|
||||
timeout=15,
|
||||
timeout=(8, 20), # (connect_timeout, read_timeout)
|
||||
)
|
||||
cookies = s.cookies.get_dict()
|
||||
if not cookies.get("savetv_active_login"):
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue