- DB-Migration: UNIQUE(date) → UNIQUE(date, post_time) — alte DBs werden automatisch beim Start migriert (database.py init_db) - api_save: gibt article_id zurück für nachgelagerte Operationen - confirmPlan(): speichert auf selectedDate, verschiebt dann ggf. per reschedule auf Zieldatum — fixes "Kein Artikel für diesen Tag vorhanden" - Alle Source-Dateien (app.py, database.py, templates, ...) hinzugefügt - arakava-news: cursor-memory-system Artikel + SVG-Diagramm hinzugefügt Made-with: Cursor
100 lines
2.8 KiB
Python
100 lines
2.8 KiB
Python
"""
|
|
Strukturiertes Logging für FünfVorAcht.
|
|
Schreibt JSON-Lines nach /logs/fuenfvoracht.log
|
|
"""
|
|
import json
import logging
import os
from datetime import datetime, timezone
|
|
|
|
LOG_PATH = os.environ.get('LOG_PATH', '/logs/fuenfvoracht.log')
|
|
|
|
_file_handler = None
|
|
|
|
|
|
def _get_file_handler():
|
|
global _file_handler
|
|
if _file_handler is None:
|
|
os.makedirs(os.path.dirname(LOG_PATH), exist_ok=True)
|
|
_file_handler = logging.FileHandler(LOG_PATH, encoding='utf-8')
|
|
_file_handler.setLevel(logging.DEBUG)
|
|
return _file_handler
|
|
|
|
|
|
def _write(level: str, event: str, **kwargs):
|
|
entry = {
|
|
'ts': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
|
|
'level': level,
|
|
'event': event,
|
|
**kwargs,
|
|
}
|
|
line = json.dumps(entry, ensure_ascii=False)
|
|
try:
|
|
handler = _get_file_handler()
|
|
record = logging.LogRecord(
|
|
name='fuenfvoracht', level=getattr(logging, level),
|
|
pathname='', lineno=0, msg=line, args=(), exc_info=None
|
|
)
|
|
handler.emit(record)
|
|
except Exception:
|
|
pass
|
|
# Auch in stdout damit docker logs es zeigt
|
|
print(line, flush=True)
|
|
|
|
|
|
def info(event: str, **kwargs):
|
|
_write('INFO', event, **kwargs)
|
|
|
|
|
|
def warning(event: str, **kwargs):
|
|
_write('WARNING', event, **kwargs)
|
|
|
|
|
|
def error(event: str, **kwargs):
|
|
_write('ERROR', event, **kwargs)
|
|
|
|
|
|
# Shorthand helpers for common events
|
|
def article_generated(date: str, source: str, version: int, tag: str):
|
|
info('article_generated', date=date, source=source[:120], version=version, tag=tag)
|
|
|
|
|
|
def article_saved(date: str, post_time: str):
|
|
info('article_saved', date=date, post_time=post_time)
|
|
|
|
|
|
def article_scheduled(date: str, post_time: str, notify_at: str):
|
|
info('article_scheduled', date=date, post_time=post_time, notify_at=notify_at)
|
|
|
|
|
|
def article_sent_to_bot(date: str, post_time: str, chat_ids: list):
|
|
info('article_sent_to_bot', date=date, post_time=post_time, chat_ids=chat_ids)
|
|
|
|
|
|
def article_approved(date: str, post_time: str, by_chat_id: int):
|
|
info('article_approved', date=date, post_time=post_time, by_chat_id=by_chat_id)
|
|
|
|
|
|
def article_posted(date: str, post_time: str, channel_id: str, message_id: int):
|
|
info('article_posted', date=date, post_time=post_time,
|
|
channel_id=channel_id, message_id=message_id)
|
|
|
|
|
|
def article_skipped(date: str, post_time: str):
|
|
info('article_skipped', date=date, post_time=post_time)
|
|
|
|
|
|
def posting_failed(date: str, post_time: str, reason: str):
|
|
error('posting_failed', date=date, post_time=post_time, reason=reason[:300])
|
|
|
|
|
|
def reviewer_added(chat_id: int, name: str):
|
|
info('reviewer_added', chat_id=chat_id, name=name)
|
|
|
|
|
|
def reviewer_removed(chat_id: int):
|
|
info('reviewer_removed', chat_id=chat_id)
|
|
|
|
|
|
def slot_conflict(date: str, post_time: str):
|
|
warning('slot_conflict', date=date, post_time=post_time)
|