From 4d3e49ff64dfe0d99adb2c37c5f3e2b3de7904ce Mon Sep 17 00:00:00 2001 From: Julien Balet Date: Sun, 10 May 2026 12:11:42 +0200 Subject: [PATCH] added cron and notification telegram --- .env.prod | 2 + data/class_href_cache.json | 5 +- eptm_dashboard/eptm_dashboard.py | 2 + eptm_dashboard/pages/cron.py | 885 ++++++++++++++++++++++++++++ eptm_dashboard/pages/logs.py | 113 +++- eptm_dashboard/sidebar.py | 1 + logs/cron/job_3_20260510-092902.log | 141 +++++ logs/cron_tick.log | 2 + scripts/cron_tick.py | 464 +++++++++++++++ scripts/run_imports.py | 11 +- src/db.py | 47 ++ src/importer.py | 4 + src/notifier.py | 217 +++++++ 13 files changed, 1884 insertions(+), 10 deletions(-) create mode 100644 eptm_dashboard/pages/cron.py create mode 100644 logs/cron/job_3_20260510-092902.log create mode 100644 logs/cron_tick.log create mode 100755 scripts/cron_tick.py create mode 100644 src/notifier.py diff --git a/.env.prod b/.env.prod index 51505de..42512bd 100644 --- a/.env.prod +++ b/.env.prod @@ -1 +1,3 @@ REFLEX_SECRET_KEY=af16a3c0a6f2a94583ebd704f4e9716743abe27c10e8837633274d08441c20c2 +TELEGRAM_BOT_TOKEN=8659950969:AAEpc3sl34txpsHyYC5-3rnfgVnkEuQoU_Q +TELEGRAM_CHAT_ID=-4992234358 diff --git a/data/class_href_cache.json b/data/class_href_cache.json index 96f56df..2182949 100644 --- a/data/class_href_cache.json +++ b/data/class_href_cache.json @@ -1,5 +1,6 @@ { - "EM-AU 2:abs": "https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=38bbf90d-51da-406e-a2af-4d5f8f5958bd", "AUTOMAT 1:abs": "https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=687fa97d-1032-4078-94ae-1899fc1e6014", - "AUTOMAT 2:abs": "https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=8cb48a35-290c-4488-b98c-437d2c9186a6" + "AUTOMAT 2:abs": "https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=8cb48a35-290c-4488-b98c-437d2c9186a6", + "EM-AU 1:abs": "https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=da3e0b68-5559-4c0c-a8be-f764c68dbca9", + 
"EM-AU 2:abs": "https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=ef32322a-8bd9-45d4-9583-c7c22cbc577d" } \ No newline at end of file diff --git a/eptm_dashboard/eptm_dashboard.py b/eptm_dashboard/eptm_dashboard.py index 93462b6..9d9b115 100644 --- a/eptm_dashboard/eptm_dashboard.py +++ b/eptm_dashboard/eptm_dashboard.py @@ -6,6 +6,7 @@ from .pages.fiche import fiche_page, FicheState from .pages.classe import classe_page, ClasseState from .pages.escada import escada_page, EscadaState from .pages.logs import logs_page, LogsState +from .pages.cron import cron_page, CronState from .pages.users import users_page, UsersState from .pages.params import params_page, ParamsState @@ -30,5 +31,6 @@ app.add_page(fiche_page, route="/fiche", on_load=[AuthState.check_auth, app.add_page(classe_page, route="/classe", on_load=[AuthState.check_auth, ClasseState.load_data], title=TITLE) app.add_page(escada_page, route="/escada", on_load=[AuthState.check_auth, EscadaState.load_data], title=TITLE) app.add_page(logs_page, route="/logs", on_load=[AuthState.check_auth, LogsState.load_data], title=TITLE) +app.add_page(cron_page, route="/cron", on_load=[AuthState.check_auth, CronState.load_data], title=TITLE) app.add_page(users_page, route="/users", on_load=[AuthState.check_auth, UsersState.load_data], title=TITLE) app.add_page(params_page, route="/params", on_load=[AuthState.check_auth, ParamsState.load_data], title=TITLE) diff --git a/eptm_dashboard/pages/cron.py b/eptm_dashboard/pages/cron.py new file mode 100644 index 0000000..9b6ce0d --- /dev/null +++ b/eptm_dashboard/pages/cron.py @@ -0,0 +1,885 @@ +"""Page /cron — gestion des tâches planifiées Escada (push / sync auto).""" + +from __future__ import annotations + +import json +import os +import sys +from datetime import datetime, timedelta +from pathlib import Path + +import reflex as rx +from sqlalchemy import select + +# Pour les imports src/* qui n'ont pas systématiquement le path setup +_ROOT = 
Path(__file__).resolve().parent.parent.parent +if str(_ROOT) not in sys.path: + sys.path.insert(0, str(_ROOT)) + +from src.db import CronJob, get_session, init_db, Apprenti # noqa: E402 +from src.notifier import test_telegram # noqa: E402 + +from ..state import AuthState +from ..sidebar import layout + + +_DAY_NAMES = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"] +_DAY_LABELS = { + "MON": "Lun", "TUE": "Mar", "WED": "Mer", "THU": "Jeu", + "FRI": "Ven", "SAT": "Sam", "SUN": "Dim", +} + + +# ── State ───────────────────────────────────────────────────────────────────── + +class CronState(AuthState): + jobs: list[dict] = [] + classes_avail: list[str] = [] + + # Édition + editing_id: int = 0 # 0 = nouveau, sinon id du job en édition + edit_open: bool = False + + f_name: str = "" + f_enabled: bool = True + f_schedule_kind: str = "daily" # "daily" | "weekly" | "interval" + f_time_hh: str = "03" + f_time_mm: str = "00" + f_interval_min: str = "60" + f_days: list[str] = [] # ["MON","WED",...] 
+ f_task_kind: str = "push_then_sync" + f_sync_abs: bool = True + f_sync_bn: bool = True + f_sync_notes: bool = True + f_sync_fiches: bool = False + f_force_abs: bool = False + f_classes_all: bool = True + f_classes: list[str] = [] + f_notify_on: str = "failure" + f_notify_level: str = "normal" + f_notify_chat_id: str = "" + + save_error: str = "" + save_ok: bool = False + + tg_test_msg: str = "" + tg_test_ok: bool = False + + # ── Helpers ─────────────────────────────────────────────────────────────── + + def _ensure_admin(self): + return self.authenticated and self.role == "admin" + + def _job_to_dict(self, job: CronJob) -> dict: + # Calcul d'une description lisible + desc = self._human_schedule(job.schedule_kind, job.schedule_value) + next_run = self._next_run_estimate(job) + return { + "id": job.id, + "name": job.name, + "enabled": job.enabled, + "schedule_desc": desc, + "task_kind": job.task_kind, + "task_label": {"push": "Push", "sync": "Sync", + "push_then_sync": "Push + Sync"}.get(job.task_kind, job.task_kind), + "last_run_at": job.last_run_at.strftime("%d.%m.%Y %H:%M") if job.last_run_at else "", + "last_status": job.last_status, + "last_message": job.last_message[:120] if job.last_message else "", + "last_log_path": job.last_log_path or "", + "next_run": next_run, + "notify_on": job.notify_on, + } + + @staticmethod + def _human_schedule(kind: str, value: str) -> str: + if kind == "daily": + return f"Tous les jours à {value}" + if kind == "weekly": + try: + days_part, time_part = value.split(":", 1) + labels = ", ".join(_DAY_LABELS.get(d.strip(), d.strip()) + for d in days_part.split(",")) + return f"{labels} à {time_part}" + except ValueError: + return value + if kind == "interval": + try: + m = int(value) + if m % 60 == 0: + return f"Toutes les {m // 60} h" + return f"Toutes les {m} min" + except (TypeError, ValueError): + return value + return value + + @staticmethod + def _next_run_estimate(job: CronJob) -> str: + """Estime grossièrement la prochaine 
exécution (lisible).""" + if not job.enabled: + return "—" + now = datetime.now() + if job.schedule_kind == "interval": + try: + m = int(job.schedule_value) + except (TypeError, ValueError): + return "—" + if job.last_run_at is None: + return "Au prochain tick" + nxt = job.last_run_at + timedelta(minutes=m) + return nxt.strftime("%d.%m %H:%M") + if job.schedule_kind == "daily": + try: + hh, mm = job.schedule_value.split(":") + target = now.replace(hour=int(hh), minute=int(mm), second=0, microsecond=0) + if (job.last_run_at and job.last_run_at.date() == now.date() + and job.last_run_at >= target): + target += timedelta(days=1) + elif target < now: + target += timedelta(days=1) + return target.strftime("%d.%m %H:%M") + except (ValueError, AttributeError): + return "—" + if job.schedule_kind == "weekly": + return "Selon planning" + return "—" + + # ── Load / refresh ──────────────────────────────────────────────────────── + + def load_data(self): + if not self.authenticated: + return rx.redirect("/login") + if self.role != "admin": + return rx.redirect("/accueil") + # Garantit que la table existe + try: + init_db() + except Exception: + pass + self._refresh() + + def _refresh(self): + sess = get_session() + try: + jobs = sess.execute(select(CronJob).order_by(CronJob.id)).scalars().all() + self.jobs = [self._job_to_dict(j) for j in jobs] + + # Liste des classes disponibles (réutilisé par le widget) + classes = sess.execute( + select(Apprenti.classe).distinct().order_by(Apprenti.classe) + ).scalars().all() + self.classes_avail = [c for c in classes if c and not c.startswith(("MP", "MI"))] + finally: + sess.close() + + # ── Form actions ────────────────────────────────────────────────────────── + + def open_new(self): + self.editing_id = 0 + self.edit_open = True + self.f_name = "" + self.f_enabled = True + self.f_schedule_kind = "daily" + self.f_time_hh = "03" + self.f_time_mm = "00" + self.f_interval_min = "60" + self.f_days = ["MON", "TUE", "WED", "THU", "FRI"] + 
self.f_task_kind = "push_then_sync" + self.f_sync_abs = True + self.f_sync_bn = True + self.f_sync_notes = True + self.f_sync_fiches = False + self.f_force_abs = False + self.f_classes_all = True + self.f_classes = [] + self.f_notify_on = "failure" + self.f_notify_level = "normal" + self.f_notify_chat_id = "" + self.save_error = "" + self.save_ok = False + + def open_edit(self, job_id: int): + sess = get_session() + try: + job = sess.get(CronJob, job_id) + if not job: + return + self.editing_id = job.id + self.edit_open = True + self.f_name = job.name + self.f_enabled = job.enabled + self.f_schedule_kind = job.schedule_kind + if job.schedule_kind == "daily": + hh, _, mm = (job.schedule_value or "03:00").partition(":") + self.f_time_hh = hh.zfill(2) + self.f_time_mm = mm.zfill(2) + self.f_interval_min = "60" + self.f_days = ["MON", "TUE", "WED", "THU", "FRI"] + elif job.schedule_kind == "weekly": + try: + days_part, time_part = job.schedule_value.split(":", 1) + hh, _, mm = time_part.partition(":") + self.f_time_hh = hh.zfill(2) + self.f_time_mm = mm.zfill(2) + self.f_days = [d.strip() for d in days_part.split(",") if d.strip()] + except ValueError: + self.f_time_hh = "03" + self.f_time_mm = "00" + self.f_days = ["MON", "TUE", "WED", "THU", "FRI"] + self.f_interval_min = "60" + else: # interval + self.f_interval_min = job.schedule_value or "60" + self.f_time_hh = "03" + self.f_time_mm = "00" + self.f_days = ["MON", "TUE", "WED", "THU", "FRI"] + + self.f_task_kind = job.task_kind + self.f_sync_abs = job.sync_abs + self.f_sync_bn = job.sync_bn + self.f_sync_notes = job.sync_notes + self.f_sync_fiches = job.sync_fiches + self.f_force_abs = job.force_abs + + classes_raw = (job.classes_json or "ALL").strip() + if classes_raw == "ALL": + self.f_classes_all = True + self.f_classes = [] + else: + try: + lst = json.loads(classes_raw) + self.f_classes_all = False + self.f_classes = [str(c) for c in (lst if isinstance(lst, list) else [])] + except Exception: + 
self.f_classes_all = True + self.f_classes = [] + + self.f_notify_on = job.notify_on + self.f_notify_level = getattr(job, "notify_level", "normal") or "normal" + self.f_notify_chat_id = job.notify_chat_id or "" + self.save_error = "" + self.save_ok = False + finally: + sess.close() + + def close_edit(self): + self.edit_open = False + self.save_error = "" + self.save_ok = False + + def set_f_name(self, v: str): self.f_name = v + def set_f_enabled(self, v: bool): self.f_enabled = v + def set_f_schedule_kind(self, v: str): self.f_schedule_kind = v + def set_f_time_hh(self, v: str): + v = "".join(ch for ch in v if ch.isdigit())[:2] + self.f_time_hh = v + def set_f_time_mm(self, v: str): + v = "".join(ch for ch in v if ch.isdigit())[:2] + self.f_time_mm = v + def set_f_interval_min(self, v: str): + v = "".join(ch for ch in v if ch.isdigit())[:5] + self.f_interval_min = v + def toggle_f_day(self, day: str): + if day in self.f_days: + self.f_days = [d for d in self.f_days if d != day] + else: + self.f_days = self.f_days + [day] + def set_f_task_kind(self, v: str): self.f_task_kind = v + def set_f_sync_abs(self, v: bool): self.f_sync_abs = v + def set_f_sync_bn(self, v: bool): self.f_sync_bn = v + def set_f_sync_notes(self, v: bool): self.f_sync_notes = v + def set_f_sync_fiches(self, v: bool): self.f_sync_fiches = v + def set_f_force_abs(self, v: bool): self.f_force_abs = v + def set_f_classes_all(self, v: bool): self.f_classes_all = v + def toggle_f_class(self, c: str): + if c in self.f_classes: + self.f_classes = [x for x in self.f_classes if x != c] + else: + self.f_classes = self.f_classes + [c] + def set_f_notify_on(self, v: str): self.f_notify_on = v + def set_f_notify_level(self, v: str): self.f_notify_level = v + def set_f_notify_chat_id(self, v: str): self.f_notify_chat_id = v.strip() + + def save_job(self): + self.save_error = "" + self.save_ok = False + + if not self.f_name.strip(): + self.save_error = "Le nom est obligatoire." 
+ return + + # Construire schedule_value selon kind + if self.f_schedule_kind == "daily": + try: + hh = int(self.f_time_hh or "0"); mm = int(self.f_time_mm or "0") + if not (0 <= hh < 24 and 0 <= mm < 60): + raise ValueError + except ValueError: + self.save_error = "Heure invalide." + return + schedule_value = f"{hh:02d}:{mm:02d}" + elif self.f_schedule_kind == "weekly": + if not self.f_days: + self.save_error = "Sélectionne au moins un jour de la semaine." + return + try: + hh = int(self.f_time_hh or "0"); mm = int(self.f_time_mm or "0") + if not (0 <= hh < 24 and 0 <= mm < 60): + raise ValueError + except ValueError: + self.save_error = "Heure invalide." + return + ordered = [d for d in _DAY_NAMES if d in self.f_days] + schedule_value = f"{','.join(ordered)}:{hh:02d}:{mm:02d}" + else: # interval + try: + m = int(self.f_interval_min or "0") + if m < 1: + raise ValueError + except ValueError: + self.save_error = "Intervalle invalide (minutes > 0)." + return + schedule_value = str(m) + + if self.f_classes_all: + classes_json = "ALL" + else: + if not self.f_classes: + self.save_error = "Sélectionne au moins une classe (ou coche \"Toutes les classes\")." + return + classes_json = json.dumps(self.f_classes) + + sess = get_session() + try: + now = datetime.now() + if self.editing_id == 0: + job = CronJob( + name=self.f_name.strip(), + enabled=self.f_enabled, + schedule_kind=self.f_schedule_kind, + schedule_value=schedule_value, + task_kind=self.f_task_kind, + sync_abs=self.f_sync_abs, + sync_bn=self.f_sync_bn, + sync_notes=self.f_sync_notes, + sync_fiches=self.f_sync_fiches, + force_abs=self.f_force_abs, + classes_json=classes_json, + notify_on=self.f_notify_on, + notify_level=self.f_notify_level, + notify_chat_id=self.f_notify_chat_id, + created_at=now, + updated_at=now, + ) + sess.add(job) + else: + job = sess.get(CronJob, self.editing_id) + if not job: + self.save_error = "Job introuvable." 
+ return + job.name = self.f_name.strip() + job.enabled = self.f_enabled + job.schedule_kind = self.f_schedule_kind + job.schedule_value = schedule_value + job.task_kind = self.f_task_kind + job.sync_abs = self.f_sync_abs + job.sync_bn = self.f_sync_bn + job.sync_notes = self.f_sync_notes + job.sync_fiches = self.f_sync_fiches + job.force_abs = self.f_force_abs + job.classes_json = classes_json + job.notify_on = self.f_notify_on + job.notify_level = self.f_notify_level + job.notify_chat_id = self.f_notify_chat_id + job.updated_at = now + sess.commit() + self.save_ok = True + self._refresh() + self.edit_open = False + except Exception as e: + sess.rollback() + self.save_error = f"Erreur DB : {e}" + finally: + sess.close() + + def toggle_enabled(self, job_id: int): + sess = get_session() + try: + job = sess.get(CronJob, job_id) + if job: + job.enabled = not job.enabled + job.updated_at = datetime.now() + sess.commit() + self._refresh() + finally: + sess.close() + + def delete_job(self, job_id: int): + sess = get_session() + try: + job = sess.get(CronJob, job_id) + if job: + sess.delete(job) + sess.commit() + self._refresh() + finally: + sess.close() + + def test_telegram(self): + ok, msg = test_telegram() + self.tg_test_ok = ok + self.tg_test_msg = msg + + +# ── UI components ───────────────────────────────────────────────────────────── + +def _badge_status(status: rx.Var) -> rx.Component: + return rx.match( + status, + ("ok", rx.badge("OK", color_scheme="green", variant="soft", size="1")), + ("fail", rx.badge("Échec", color_scheme="red", variant="soft", size="1")), + ("running", rx.badge("Running", color_scheme="orange", variant="soft", size="1")), + rx.badge("—", color_scheme="gray", variant="soft", size="1"), + ) + + +def _job_row(job: rx.Var) -> rx.Component: + return rx.box( + rx.grid( + # Nom + statut + rx.vstack( + rx.hstack( + rx.text(job["name"], weight="bold", size="2"), + rx.cond( + job["enabled"], + rx.badge("Actif", color_scheme="green", variant="soft", 
size="1"), + rx.badge("Désactivé", color_scheme="gray", variant="soft", size="1"), + ), + spacing="2", align="center", wrap="wrap", + ), + rx.text(job["schedule_desc"], size="1", color="var(--gray-10)"), + spacing="0", align="start", + ), + # Tâche + rx.text(job["task_label"], size="2"), + # Dernière exécution + rx.vstack( + rx.text( + rx.cond(job["last_run_at"] != "", job["last_run_at"], "—"), + size="1", + ), + _badge_status(job["last_status"]), + spacing="1", align="start", + ), + # Prochaine exécution + rx.text(job["next_run"], size="2", color="var(--gray-10)"), + # Actions + rx.hstack( + rx.button( + rx.icon(rx.cond(job["enabled"], "pause", "play"), size=14), + on_click=CronState.toggle_enabled(job["id"]), + variant="ghost", size="1", color_scheme="gray", + ), + rx.button( + rx.icon("pencil", size=14), + on_click=CronState.open_edit(job["id"]), + variant="ghost", size="1", color_scheme="gray", + ), + rx.alert_dialog.root( + rx.alert_dialog.trigger( + rx.button(rx.icon("trash-2", size=14), + variant="ghost", size="1", color_scheme="red"), + ), + rx.alert_dialog.content( + rx.alert_dialog.title("Supprimer ce job ?"), + rx.alert_dialog.description( + "Le job sera supprimé. Les fichiers de log conservés." 
+ ), + rx.flex( + rx.alert_dialog.cancel(rx.button("Annuler", variant="soft")), + rx.alert_dialog.action( + rx.button("Supprimer", color_scheme="red", + on_click=CronState.delete_job(job["id"])), + ), + gap="2", justify="end", margin_top="1rem", + ), + ), + ), + spacing="1", align="center", + ), + columns="2.5fr 1fr 1.3fr 1.2fr auto", + gap="0.75rem", + align="center", + width="100%", + ), + rx.cond( + job["last_message"] != "", + rx.text( + "↳ ", job["last_message"], + size="1", color="var(--gray-10)", margin_top="0.25rem", + ), + ), + padding="0.75rem 1rem", + background_color="white", + border="1px solid var(--gray-5)", + border_radius="6px", + width="100%", + ) + + +def _form_schedule_picker() -> rx.Component: + return rx.vstack( + rx.text("Planification", size="2", font_weight="600"), + rx.radio( + ["daily", "weekly", "interval"], + value=CronState.f_schedule_kind, + on_change=CronState.set_f_schedule_kind, + direction="row", + ), + rx.cond( + CronState.f_schedule_kind == "interval", + rx.hstack( + rx.text("Toutes les", size="2"), + rx.input( + value=CronState.f_interval_min, + on_change=CronState.set_f_interval_min, + width="80px", + ), + rx.text("minutes", size="2"), + spacing="2", align="center", + ), + rx.cond( + CronState.f_schedule_kind == "weekly", + rx.vstack( + rx.flex( + *[ + rx.box( + rx.text(_DAY_LABELS[d], size="1", weight="bold"), + on_click=CronState.toggle_f_day(d), + cursor="pointer", + padding="0.35rem 0.7rem", + border_radius="6px", + border="2px solid", + border_color=rx.cond( + CronState.f_days.contains(d), + "var(--red-9)", "var(--gray-6)", + ), + background_color=rx.cond( + CronState.f_days.contains(d), + "var(--red-9)", "transparent", + ), + color=rx.cond( + CronState.f_days.contains(d), + "white", "var(--gray-12)", + ), + ) + for d in _DAY_NAMES + ], + gap="0.3rem", + wrap="wrap", + ), + rx.hstack( + rx.text("Heure :", size="2"), + rx.input(value=CronState.f_time_hh, + on_change=CronState.set_f_time_hh, width="60px"), + rx.text(":", 
size="3"), + rx.input(value=CronState.f_time_mm, + on_change=CronState.set_f_time_mm, width="60px"), + spacing="2", align="center", + ), + spacing="2", + ), + # daily + rx.hstack( + rx.text("Heure :", size="2"), + rx.input(value=CronState.f_time_hh, + on_change=CronState.set_f_time_hh, width="60px"), + rx.text(":", size="3"), + rx.input(value=CronState.f_time_mm, + on_change=CronState.set_f_time_mm, width="60px"), + spacing="2", align="center", + ), + ), + ), + spacing="2", width="100%", + ) + + +def _form_task_picker() -> rx.Component: + return rx.vstack( + rx.text("Tâche", size="2", font_weight="600"), + rx.radio( + ["push", "sync", "push_then_sync"], + value=CronState.f_task_kind, + on_change=CronState.set_f_task_kind, + direction="column", + ), + rx.cond( + CronState.f_task_kind != "push", + rx.vstack( + rx.text("Données à synchroniser", size="2", font_weight="600", + margin_top="0.5rem"), + rx.flex( + rx.hstack( + rx.checkbox(checked=CronState.f_sync_abs, + on_change=CronState.set_f_sync_abs, size="2"), + rx.text("Absences", size="2"), + spacing="2", align="center", + ), + rx.hstack( + rx.checkbox(checked=CronState.f_sync_bn, + on_change=CronState.set_f_sync_bn, size="2"), + rx.text("BN + Matu", size="2"), + spacing="2", align="center", + ), + rx.hstack( + rx.checkbox(checked=CronState.f_sync_notes, + on_change=CronState.set_f_sync_notes, size="2"), + rx.text("Notes d'examen", size="2"), + spacing="2", align="center", + ), + rx.hstack( + rx.checkbox(checked=CronState.f_sync_fiches, + on_change=CronState.set_f_sync_fiches, size="2"), + rx.text("Fiches apprentis", size="2"), + spacing="2", align="center", + ), + gap="0.5rem 1.25rem", + flex_wrap="wrap", + ), + rx.hstack( + rx.checkbox(checked=CronState.f_force_abs, + on_change=CronState.set_f_force_abs, size="2"), + rx.text("Forcer le retéléchargement des PDFs absences", size="2"), + spacing="2", align="center", + ), + spacing="2", + ), + ), + spacing="2", width="100%", + ) + + +def _form_classes_picker() -> 
rx.Component: + return rx.vstack( + rx.text("Classes", size="2", font_weight="600"), + rx.hstack( + rx.checkbox(checked=CronState.f_classes_all, + on_change=CronState.set_f_classes_all, size="2"), + rx.text("Toutes les classes", size="2"), + spacing="2", align="center", + ), + rx.cond( + ~CronState.f_classes_all, + rx.flex( + rx.foreach( + CronState.classes_avail, + lambda c: rx.box( + rx.text(c, size="1"), + on_click=CronState.toggle_f_class(c), + cursor="pointer", + padding="0.3rem 0.6rem", + border_radius="9999px", + border="1px solid", + border_color=rx.cond( + CronState.f_classes.contains(c), + "var(--red-9)", "var(--gray-6)", + ), + background_color=rx.cond( + CronState.f_classes.contains(c), + "var(--red-9)", "transparent", + ), + color=rx.cond( + CronState.f_classes.contains(c), + "white", "var(--gray-12)", + ), + ), + ), + gap="0.3rem", + wrap="wrap", + ), + ), + spacing="2", width="100%", + ) + + +def _form_notify_picker() -> rx.Component: + return rx.vstack( + rx.text("Notifications Telegram", size="2", font_weight="600"), + + rx.text("Quand notifier", size="1", color="var(--gray-10)"), + rx.radio( + ["never", "failure", "success", "always"], + value=CronState.f_notify_on, + on_change=CronState.set_f_notify_on, + direction="row", + ), + + rx.text("Niveau de détail", size="1", color="var(--gray-10)", margin_top="0.25rem"), + rx.radio( + ["normal", "detailed"], + value=CronState.f_notify_level, + on_change=CronState.set_f_notify_level, + direction="row", + ), + rx.text( + rx.cond( + CronState.f_notify_level == "detailed", + "Détaillée : nom + statut + durée + classes importées + détail BN/notes/Matu + (nouvelles / modifiées / pending) absences", + "Normal : nom + statut + durée uniquement", + ), + size="1", + color="var(--gray-9)", + ), + + rx.input( + placeholder="Chat ID Telegram (vide = défaut configuré côté serveur)", + value=CronState.f_notify_chat_id, + on_change=CronState.set_f_notify_chat_id, + width="100%", + ), + spacing="2", width="100%", + ) + 
+ +def _edit_form() -> rx.Component: + return rx.box( + rx.vstack( + rx.hstack( + rx.text( + rx.cond(CronState.editing_id == 0, + "Nouveau job", "Modifier le job"), + weight="bold", size="3", + ), + rx.spacer(), + rx.button(rx.icon("x", size=14), + on_click=CronState.close_edit, + variant="ghost", color_scheme="gray", size="1"), + width="100%", align="center", + ), + rx.divider(), + rx.input( + placeholder="Nom (ex: Sync nocturne)", + value=CronState.f_name, + on_change=CronState.set_f_name, + width="100%", + ), + rx.hstack( + rx.checkbox(checked=CronState.f_enabled, + on_change=CronState.set_f_enabled, size="2"), + rx.text("Activé", size="2"), + spacing="2", align="center", + ), + rx.divider(), + _form_schedule_picker(), + rx.divider(), + _form_task_picker(), + rx.divider(), + _form_classes_picker(), + rx.divider(), + _form_notify_picker(), + rx.cond( + CronState.save_error != "", + rx.box( + rx.text(CronState.save_error, color="red", size="2"), + padding="0.5rem 1rem", + background_color="#fff5f5", + border="1px solid #ffcccc", + border_radius="6px", + width="100%", + ), + ), + rx.hstack( + rx.button("Enregistrer", on_click=CronState.save_job, + color_scheme="indigo", size="2"), + rx.button("Annuler", on_click=CronState.close_edit, + variant="soft", color_scheme="gray", size="2"), + spacing="2", + ), + spacing="3", + width="100%", + ), + padding="1.25rem", + background_color="var(--blue-2)", + border_radius="8px", + border="1px solid var(--blue-6)", + width="100%", + ) + + +def _telegram_test_box() -> rx.Component: + return rx.box( + rx.hstack( + rx.text("Notifications Telegram", weight="bold", size="2"), + rx.spacer(), + rx.button( + rx.icon("send", size=14), + "Envoyer un test", + on_click=CronState.test_telegram, + variant="outline", size="1", color_scheme="indigo", + ), + width="100%", align="center", + ), + rx.cond( + CronState.tg_test_msg != "", + rx.text( + CronState.tg_test_msg, + size="1", + color=rx.cond(CronState.tg_test_ok, "var(--green-11)", 
"var(--red-11)"), + margin_top="0.5rem", + ), + ), + padding="0.75rem 1rem", + background_color="var(--gray-2)", + border="1px solid var(--gray-5)", + border_radius="6px", + width="100%", + ) + + +# ── Page ────────────────────────────────────────────────────────────────────── + +def cron_page() -> rx.Component: + return layout( + rx.vstack( + rx.hstack( + rx.heading("Tâches planifiées", size="7"), + rx.spacer(), + rx.button( + rx.icon("plus", size=14), "Nouveau job", + on_click=CronState.open_new, + color_scheme="indigo", size="2", + ), + width="100%", align="center", + ), + rx.text( + "Planification automatique des opérations Escada (push, sync). " + "Le serveur lance ces jobs selon leur horaire (timezone Europe/Zurich).", + size="2", color="var(--gray-10)", + ), + _telegram_test_box(), + rx.cond( + CronState.edit_open, + _edit_form(), + ), + rx.cond( + CronState.jobs.length() == 0, + rx.box( + rx.text( + "Aucun job planifié. Clique sur \"Nouveau job\" pour en créer un.", + size="2", color="#666", + ), + padding="1rem", + background_color="#e3f2fd", + border_radius="6px", + border="1px solid #90caf9", + width="100%", + ), + rx.vstack( + rx.foreach(CronState.jobs, _job_row), + spacing="2", + width="100%", + ), + ), + spacing="4", + width="100%", + ) + ) diff --git a/eptm_dashboard/pages/logs.py b/eptm_dashboard/pages/logs.py index e03f478..3a52c2e 100644 --- a/eptm_dashboard/pages/logs.py +++ b/eptm_dashboard/pages/logs.py @@ -7,14 +7,18 @@ import reflex as rx from ..state import AuthState from ..sidebar import layout -_ROOT = Path(__file__).resolve().parent.parent.parent -DATA_DIR = Path(os.getenv("DATA_DIR", str(_ROOT / "data"))) -_LOG_FILE = DATA_DIR / "logs" / "operations.log" +_ROOT = Path(__file__).resolve().parent.parent.parent +DATA_DIR = Path(os.getenv("DATA_DIR", str(_ROOT / "data"))) +_LOG_FILE = DATA_DIR / "logs" / "operations.log" +# Logs cron : bind-mount persistent /logs/cron (override via env si besoin) +_CRON_DIR = Path(os.getenv("CRON_LOG_DIR", 
"/logs/cron")) # ── State ────────────────────────────────────────────────────────────────────── class LogsState(AuthState): + # Source: "ops" | "cron:" + source: str = "ops" log_level: str = "PROD" log_content: str = "" log_total: int = 0 @@ -22,7 +26,30 @@ class LogsState(AuthState): log_empty: bool = True confirm_clear: bool = False - def _read_log(self): + # Liste des logs cron disponibles (filenames seulement) + cron_logs: list[dict] = [] + + def _refresh_cron_list(self): + if not _CRON_DIR.exists(): + self.cron_logs = [] + return + entries = [] + for fp in sorted(_CRON_DIR.glob("job_*.log"), reverse=True): + try: + stat = fp.stat() + size_kb = f"{stat.st_size / 1024:.1f} Ko" + entries.append({ + "name": fp.name, + "size": size_kb, + "mtime": stat.st_mtime, + "label": f"{fp.name} ({size_kb})", + "value": f"cron:{fp.name}", + }) + except Exception: + continue + self.cron_logs = entries[:200] # cap à 200 + + def _read_ops_log(self): if not _LOG_FILE.exists() or _LOG_FILE.stat().st_size == 0: self.log_empty = True self.log_content = "" @@ -44,18 +71,50 @@ class LogsState(AuthState): self.log_content = raw self.log_shown = self.log_total + def _read_cron_log(self, filename: str): + # Sanitize : forcer fichier dans _CRON_DIR + target = (_CRON_DIR / filename).resolve() + if not str(target).startswith(str(_CRON_DIR.resolve())): + self.log_empty = True + self.log_content = "Chemin invalide." 
+ return + if not target.exists(): + self.log_empty = True + self.log_content = "" + self.log_total = 0 + self.log_shown = 0 + return + raw = target.read_text(encoding="utf-8", errors="replace") + lines = raw.splitlines() + self.log_content = raw + self.log_total = len(lines) + self.log_shown = len(lines) + self.log_empty = len(lines) == 0 + + def _read_log(self): + if self.source.startswith("cron:"): + self._read_cron_log(self.source.split(":", 1)[1]) + else: + self._read_ops_log() + def load_data(self): if not self.authenticated: return rx.redirect("/login") + self._refresh_cron_list() self._read_log() def refresh(self): + self._refresh_cron_list() self._read_log() def set_log_level(self, level: str): self.log_level = level self._read_log() + def select_source(self, value: str): + self.source = value + self._read_log() + def ask_clear(self): self.confirm_clear = True @@ -71,9 +130,14 @@ class LogsState(AuthState): self._read_log() def download_logs(self): + if self.source.startswith("cron:"): + filename = self.source.split(":", 1)[1] + target = (_CRON_DIR / filename).resolve() + if not str(target).startswith(str(_CRON_DIR.resolve())) or not target.exists(): + return + return rx.download(data=target.read_bytes(), filename=filename) if _LOG_FILE.exists(): - raw = _LOG_FILE.read_bytes() - return rx.download(data=raw, filename="operations.log") + return rx.download(data=_LOG_FILE.read_bytes(), filename="operations.log") # ── UI ───────────────────────────────────────────────────────────────────────── @@ -169,6 +233,37 @@ def _log_display() -> rx.Component: ) +def _source_picker() -> rx.Component: + """Dropdown : log opérations ou un fichier de log cron individuel.""" + return rx.hstack( + rx.text("Source :", size="2", weight="medium", color="#555"), + rx.select.root( + rx.select.trigger(), + rx.select.content( + rx.select.group( + rx.select.label("Application"), + rx.select.item("Opérations (operations.log)", value="ops"), + ), + rx.cond( + 
LogsState.cron_logs.length() > 0, + rx.select.group( + rx.select.label("Cron jobs"), + rx.foreach( + LogsState.cron_logs, + lambda l: rx.select.item(l["label"], value=l["value"]), + ), + ), + ), + ), + value=LogsState.source, + on_change=LogsState.select_source, + size="1", + ), + align="center", + gap="0.375rem", + ) + + def logs_page() -> rx.Component: return layout( rx.vstack( @@ -176,6 +271,7 @@ def logs_page() -> rx.Component: rx.flex( rx.heading("Logs", size="6"), rx.flex( + _source_picker(), rx.hstack( rx.text("Niveau :", size="2", weight="medium", color="#555"), rx.select( @@ -204,7 +300,10 @@ def logs_page() -> rx.Component: variant="soft", disabled=LogsState.log_empty, ), - _clear_zone(), + rx.cond( + LogsState.source == "ops", + _clear_zone(), + ), gap="0.5rem", align="center", flex_wrap="wrap", diff --git a/eptm_dashboard/sidebar.py b/eptm_dashboard/sidebar.py index d83cefa..a2e8af3 100644 --- a/eptm_dashboard/sidebar.py +++ b/eptm_dashboard/sidebar.py @@ -23,6 +23,7 @@ _PAGES = [ _ADMIN_PAGES = [ ("Escada", "/escada", "globe"), + ("Cron", "/cron", "alarm-clock"), ("Logs", "/logs", "file-text"), ("Utilisateurs", "/users", "user-cog"), ("Parametres", "/params", "settings"), diff --git a/logs/cron/job_3_20260510-092902.log b/logs/cron/job_3_20260510-092902.log new file mode 100644 index 0000000..53382f2 --- /dev/null +++ b/logs/cron/job_3_20260510-092902.log @@ -0,0 +1,141 @@ + +=== Job #3 'Import absences toutes les 2h' — démarré 2026-05-10T09:29:02 === +task_kind=sync classes=["EM-AU 1", "EM-AU 2"] + +━━━ Sync Escada ━━━ +$ /usr/local/bin/python3 /app/scripts/sync_esacada.py --sync-all EM-AU 1 EM-AU 2 --skip-fiches +[11:29:04] SESSION_EXPIRED +[11:29:05] [LOGIN] Identifiants configurés — connexion automatique en cours. +[11:29:05] [2FA] Secret TOTP configure - code saisi automatiquement quand demande. 
+[11:29:05] [LOGIN] url: https://edusso.apps.vs.ch/auth/realms/ictvs/protocol/openid-connect/auth?client_id=EscadaWeb&respons +[11:29:05] [LOGIN] Formulaire Keycloak détecté — saisie automatique des identifiants. +[11:29:06] [2FA] Tentative remplissage OTP sur: https://edusso.apps.vs.ch/auth/realms/ictvs/login-actions/authenticate?execution +[11:29:06] [2FA] JS fill result: filled +[11:29:06] [2FA] OTP saisi via JS — soumission du formulaire. +[11:29:06] [2FA] submit result: clicked +[11:29:08] LOGIN_OK +[11:29:08] TOTAL 2 +[11:29:08] PROGRESS 1/2 EM-AU 1 +[11:29:08] [LANG] Navigation vers DlgEinstellungen… +[11:29:09] [LANG] Valeur actuelle: 'français' +[11:29:09] [LANG] Déjà en français +[11:29:10] [lrn p=1] 'EM-AU 1' +[11:29:10] [lrn] lien trouvé : ViewLernende.aspx?id=fc04fbce-a012-42dd-9cad-67f8894ee59c +[11:29:12] [lrn] OK bouton BN présent +[11:29:12] [NOTES EM-AU 1] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=f4d1a632-37b7-429 +[11:29:16] OK NOTES EM-AU 1 [href size=139611] +[11:29:18] [BN] +1s attente… +[11:29:22] [BN] +5s attente… +[11:29:23] [BN] download main page capturé: Bulletin de note.pdf +[11:29:23] OK BN EM-AU 1 [main download size=92774] +[11:29:25] [scan page=1] recherche 'EM-AU 1'… +[11:29:25] [scan page=1] -> TROUVE +[11:29:26] [nav abs] url après nav: https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=da3e0b68-5559 +[11:29:26] [abs nav] url: https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=da3e0b68-5559 +[11:29:26] [abs] page url avant clic: https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=da3e0b68-5559 +[11:29:26] [abs] GET direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=ac3993d6-86c2-404 +[11:29:28] OK EM-AU 1 [href size=74557] +[11:29:28] PROGRESS 2/2 EM-AU 2 +[11:29:29] [lrn p=1] 'EM-AU 2' +[11:29:29] [lrn] lien trouvé : ViewLernende.aspx?id=a736d575-58b6-40e6-84f4-c5a6aa87cb9b +[11:29:31] [lrn] OK bouton BN présent +[11:29:31] 
[NOTES EM-AU 2] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=cf3257f8-9c54-410 +[11:29:33] OK NOTES EM-AU 2 [href size=117997] +[11:29:36] [BN] +1s attente… +[11:29:40] [BN] +5s attente… +[11:29:42] [BN] download main page capturé: Bulletin de note.pdf +[11:29:42] OK BN EM-AU 2 [main download size=125570] +[11:29:43] [scan page=1] recherche 'EM-AU 2'… +[11:29:43] [scan page=1] -> TROUVE +[11:29:44] [nav abs] url après nav: https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=ef32322a-8bd9 +[11:29:44] [abs nav] url: https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=ef32322a-8bd9 +[11:29:44] [abs] page url avant clic: https://escadaweb.vs.ch/Lehrpersonen/ViewAbsenzenErweitert.aspx?id=ef32322a-8bd9 +[11:29:44] [abs] GET direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=78c83132-4183-434 +[11:29:46] OK EM-AU 2 [href size=72579] +[11:29:46] MATU classes cibles: ['MP1-TASV 1A', 'MP1-TASV 1B', 'MP1-TASV 1C', 'MP1-TASV 1D', 'MP1-TASV 1E', 'MP1-TASV 2A', 'MP1-TASV 2B', 'MP1-TASV 2C', 'MP1-TASV 2D', 'MP1-TASV 2E'] +[11:29:46] MATU 1/10 MP1-TASV 1A +[11:29:47] [lrn p=1] 'MP1-TASV 1A' +[11:29:47] [lrn] lien trouvé : ViewLernende.aspx?id=00d5d0a5-a6ee-438f-84a8-b7be7feaf230 +[11:29:49] [lrn] OK bouton BN présent +[11:29:49] [MATU MP1-TASV 1A] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=6aafbfa6-0139-41e +[11:29:50] OK MATU MP1-TASV 1A [href size=55167] +[11:29:50] MATU 2/10 MP1-TASV 1B +[11:29:52] [lrn p=1] 'MP1-TASV 1B' +[11:29:52] [lrn] lien trouvé : ViewLernende.aspx?id=7f50128d-d967-4456-b674-0843bb443f87 +[11:29:53] [lrn] OK bouton BN présent +[11:29:53] [MATU MP1-TASV 1B] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=e69bcdcf-c117-440 +[11:29:55] OK MATU MP1-TASV 1B [href size=53244] +[11:29:55] MATU 3/10 MP1-TASV 1C +[11:29:56] [lrn p=1] 'MP1-TASV 1C' +[11:29:56] [lrn] tentative B: clic sur 'MP1-TASV 1C' dans la grille 
+[11:29:56] [pagination] -> page 2 +[11:29:57] [lrn p=2] 'MP1-TASV 1C' +[11:29:57] [lrn] lien trouvé : ViewLernende.aspx?id=b744b1dc-f208-4e53-8f7f-c04845ed1b8f +[11:29:58] [lrn] OK bouton BN présent +[11:29:58] [MATU MP1-TASV 1C] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=757ed578-b54c-497 +[11:30:00] OK MATU MP1-TASV 1C [href size=54365] +[11:30:00] MATU 4/10 MP1-TASV 1D +[11:30:02] [lrn p=1] 'MP1-TASV 1D' +[11:30:02] [lrn] lien trouvé : ViewLernende.aspx?id=3aaa7dbf-2719-483c-9cad-b4c0f7324468 +[11:30:34] [lrn] OK bouton BN présent +[11:30:34] [MATU MP1-TASV 1D] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=d6155f65-614c-4db +[11:30:35] OK MATU MP1-TASV 1D [href size=55279] +[11:30:35] MATU 5/10 MP1-TASV 1E +[11:30:37] [lrn p=1] 'MP1-TASV 1E' +[11:30:37] [lrn] tentative B: clic sur 'MP1-TASV 1E' dans la grille +[11:30:37] [pagination] -> page 2 +[11:30:37] [lrn p=2] 'MP1-TASV 1E' +[11:30:37] [lrn] lien trouvé : ViewLernende.aspx?id=710080d2-d4d5-48a5-b00a-ccf946ce893d +[11:30:39] [lrn] OK bouton BN présent +[11:30:39] [MATU MP1-TASV 1E] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=24e0cfe7-4e1d-4de +[11:30:41] OK MATU MP1-TASV 1E [href size=53597] +[11:30:41] MATU 6/10 MP1-TASV 2A +[11:30:42] [lrn p=1] 'MP1-TASV 2A' +[11:30:42] [lrn] tentative B: clic sur 'MP1-TASV 2A' dans la grille +[11:30:42] [pagination] -> page 2 +[11:30:43] [lrn p=2] 'MP1-TASV 2A' +[11:30:43] [lrn] lien trouvé : ViewLernende.aspx?id=58830220-ee10-453a-a2ab-84c124faf31e +[11:30:44] [lrn] OK bouton BN présent +[11:30:44] [MATU MP1-TASV 2A] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=98d089db-1004-499 +[11:30:47] OK MATU MP1-TASV 2A [href size=54517] +[11:30:47] MATU 7/10 MP1-TASV 2B +[11:30:48] [lrn p=1] 'MP1-TASV 2B' +[11:30:48] [lrn] tentative B: clic sur 'MP1-TASV 2B' dans la grille +[11:30:48] [pagination] -> page 2 +[11:30:49] [lrn p=2] 'MP1-TASV 2B' +[11:30:49] 
[lrn] lien trouvé : ViewLernende.aspx?id=28d96efc-5205-47b5-805e-c33f3342cad4 +[11:30:50] [lrn] OK bouton BN présent +[11:30:50] [MATU MP1-TASV 2B] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=19ebc4ff-0a4c-41f +[11:30:52] OK MATU MP1-TASV 2B [href size=55264] +[11:30:52] MATU 8/10 MP1-TASV 2C +[11:30:54] [lrn p=1] 'MP1-TASV 2C' +[11:30:54] [lrn] tentative B: clic sur 'MP1-TASV 2C' dans la grille +[11:30:54] [pagination] -> page 2 +[11:30:54] [lrn p=2] 'MP1-TASV 2C' +[11:30:54] [lrn] lien trouvé : ViewLernende.aspx?id=2f587458-cf5f-45cb-8ae4-bbe6a6f6b396 +[11:30:56] [lrn] OK bouton BN présent +[11:30:56] [MATU MP1-TASV 2C] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=a33e3ccb-58d1-4e1 +[11:30:58] OK MATU MP1-TASV 2C [href size=54478] +[11:30:58] MATU 9/10 MP1-TASV 2D +[11:30:59] [lrn p=1] 'MP1-TASV 2D' +[11:30:59] [lrn] tentative B: clic sur 'MP1-TASV 2D' dans la grille +[11:30:59] [pagination] -> page 2 +[11:31:00] [lrn p=2] 'MP1-TASV 2D' +[11:31:00] [lrn] lien trouvé : ViewLernende.aspx?id=0321398e-5d8b-40ac-9459-8720d69452f6 +[11:31:02] [lrn] OK bouton BN présent +[11:31:02] [MATU MP1-TASV 2D] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=9e396013-68f4-43a +[11:31:04] OK MATU MP1-TASV 2D [href size=53305] +[11:31:04] MATU 10/10 MP1-TASV 2E +[11:31:05] [lrn p=1] 'MP1-TASV 2E' +[11:31:05] [lrn] tentative B: clic sur 'MP1-TASV 2E' dans la grille +[11:31:05] [pagination] -> page 2 +[11:31:06] [lrn p=2] 'MP1-TASV 2E' +[11:31:06] [lrn] lien trouvé : ViewLernende.aspx?id=1ae0d5ac-6c30-4a19-bd2e-271bf1fb41e1 +[11:31:07] [lrn] OK bouton BN présent +[11:31:07] [MATU MP1-TASV 2E] href direct: https://escadaweb.vs.ch/Lehrpersonen/Reports/RptEscada.aspx?id=3777eee2-7dcc-442 +[11:31:09] OK MATU MP1-TASV 2E [href size=53863] +[11:31:09] sync_all_done.json ecrit par subprocess +[11:31:09] run_imports lance (pid=242) +[11:31:09] ALL_DONE {"abs": ["/app/data/pdfs/esacada_EM-AU_1.pdf", 
"/app/data/pdfs/esacada_EM-AU_2.pdf"], "bn": ["/app/data/pdfs/bn_EM-AU_1.pdf", "/app/data/pdfs/bn_EM-AU_2.pdf"], "matu": ["/app/data/pdfs/matu_MP1-TASV_1A.pdf", "/app/data/pdfs/matu_MP1-TASV_1B.pdf", "/app/data/pdfs/matu_MP1-TASV_1C.pdf", "/app/data/pdfs/matu_MP1-TASV_1D.pdf", "/app/data/pdfs/matu_MP1-TASV_1E.pdf", "/app/data/pdfs/matu_MP1-TASV_2A.pdf", "/app/data/pdfs/matu_MP1-TASV_2B.pdf", "/app/data/pdfs/matu_MP1-TASV_2C.pdf", "/app/data/pdfs/matu_MP1-TASV_2D.pdf", "/app/data/pdfs/matu_MP1-TASV_2E.pdf"], "notes": ["/app/data/pdfs/notes_EM-AU_1.pdf", "/app/data/pdfs/notes_EM-AU_2.pdf"], "fiches": {}, "errors": []} + +[exit code = 0] diff --git a/logs/cron_tick.log b/logs/cron_tick.log new file mode 100644 index 0000000..cc471ce --- /dev/null +++ b/logs/cron_tick.log @@ -0,0 +1,2 @@ +[cron_tick] 2026-05-10T09:29:02 — 1 job(s) dûs + - #3 'Import absences toutes les 2h' kind=sync schedule=interval:120 diff --git a/scripts/cron_tick.py b/scripts/cron_tick.py new file mode 100755 index 0000000..31a804a --- /dev/null +++ b/scripts/cron_tick.py @@ -0,0 +1,464 @@ +#!/usr/bin/env python3 +"""Cron tick — appelé toutes les minutes par cron OS via docker exec. + +Logique : +1. Lire tous les CronJob.enabled = True +2. Pour chaque job, calculer s'il est dû maintenant (basé sur schedule + last_run_at) +3. Si dû : + - Si déjà running (PID alive) → kill -9 (politique "kill") + - Lancer subprocess (push, sync, ou push_then_sync) + - Stream stdout+stderr dans un fichier log dédié + - Mettre à jour last_status / last_message / last_run_at / last_pid +4. Envoyer notification Telegram selon notify_on + +Le timezone effectif est celui du container (Europe/Zurich attendu). + +Le script est idempotent et safe : si déjà passé pour un job aujourd'hui, +ne le relance pas. Si trop tard (>5 min après le slot), saute (un cron raté +n'est pas rattrapé). 
+ +Usage : + python3 /app/scripts/cron_tick.py + python3 /app/scripts/cron_tick.py --dry-run # affiche ce qui serait lancé + python3 /app/scripts/cron_tick.py --job # force un job précis +""" + +from __future__ import annotations + +import argparse +import json +import os +import signal +import subprocess +import sys +import time +from datetime import datetime, timedelta +from pathlib import Path + +_ROOT = Path(__file__).resolve().parent.parent +sys.path.insert(0, str(_ROOT)) + +from src.db import CronJob, get_session, init_db, get_engine # noqa: E402 +from src.notifier import notify_job_result # noqa: E402 + +# Logs cron : par défaut /logs/cron (bind mount persistent), override via env var. +LOG_DIR = Path(os.getenv("CRON_LOG_DIR", "/logs/cron")) +try: + LOG_DIR.mkdir(parents=True, exist_ok=True) +except Exception: + # Fallback si /logs n'existe pas (ex: exécution hors container) + LOG_DIR = _ROOT / "logs" / "cron" + LOG_DIR.mkdir(parents=True, exist_ok=True) + +SCRIPT_SYNC = _ROOT / "scripts" / "sync_esacada.py" +SCRIPT_PUSH = _ROOT / "scripts" / "push_to_escada.py" +DATA_DIR = _ROOT / "data" + +# Marqueur écrit par run_imports.py à la fin des imports en DB +RUN_IMPORTS_RESULT = DATA_DIR / "sync_last_result.json" + +# Timeout d'attente de run_imports après que sync_esacada.py exit +RUN_IMPORTS_TIMEOUT_SEC = 15 * 60 # 15 min, large mais raisonnable + +# Slot de tolérance : si le cron OS rate une minute (charge, restart), +# on accepte d'exécuter dans la fenêtre [HH:MM, HH:MM+5min]. 
+_SLOT_TOLERANCE_MIN = 5 + +_DAY_NAMES = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"] + + +# ── Schedule logic ──────────────────────────────────────────────────────────── + +def _is_due(job: CronJob, now: datetime) -> bool: + """Détermine si le job doit être lancé maintenant.""" + if not job.enabled: + return False + + last = job.last_run_at + + if job.schedule_kind == "interval": + # schedule_value = nb minutes + try: + minutes = int(job.schedule_value) + except (TypeError, ValueError): + return False + if minutes < 1: + return False + if last is None: + return True + return (now - last).total_seconds() >= minutes * 60 + + if job.schedule_kind == "daily": + # schedule_value = "HH:MM" + return _due_time_of_day(job.schedule_value, last, now) + + if job.schedule_kind == "weekly": + # schedule_value = "MON,WED,FRI:HH:MM" + try: + days_part, time_part = job.schedule_value.split(":", 1) + except ValueError: + return False + days = {d.strip().upper() for d in days_part.split(",") if d.strip()} + today_name = _DAY_NAMES[now.weekday()] + if today_name not in days: + return False + return _due_time_of_day(time_part, last, now) + + return False + + +def _due_time_of_day(hhmm: str, last: datetime | None, now: datetime) -> bool: + """True si l'heure actuelle est dans la fenêtre [HH:MM, HH:MM+tolerance] + et que le job n'a pas déjà tourné aujourd'hui.""" + try: + hh, mm = hhmm.split(":") + target = now.replace(hour=int(hh), minute=int(mm), second=0, microsecond=0) + except (ValueError, AttributeError): + return False + delta = (now - target).total_seconds() + if delta < 0 or delta > _SLOT_TOLERANCE_MIN * 60: + return False + if last is not None and last.date() == now.date() and last >= target: + return False + return True + + +# ── Process management ─────────────────────────────────────────────────────── + +def _pid_alive(pid: int | None) -> bool: + if not pid: + return False + try: + os.kill(pid, 0) + return True + except (ProcessLookupError, PermissionError): + return 
False + except Exception: + return False + + +def _kill_pid(pid: int) -> None: + try: + os.killpg(os.getpgid(pid), signal.SIGKILL) + except Exception: + try: + os.kill(pid, signal.SIGKILL) + except Exception: + pass + + +# ── Build command lines ────────────────────────────────────────────────────── + +def _classes_args(job: CronJob) -> list[str]: + """Retourne la liste des classes à passer aux scripts. Vide = toutes.""" + raw = (job.classes_json or "").strip() + if not raw or raw == "ALL": + return [] + try: + lst = json.loads(raw) + if isinstance(lst, list): + return [str(c) for c in lst] + except Exception: + pass + return [] + + +def _build_sync_cmd(job: CronJob) -> list[str]: + classes = _classes_args(job) + cmd = [sys.executable, str(SCRIPT_SYNC), "--sync-all", *classes] + if not job.sync_abs: cmd.append("--skip-abs") + if not job.sync_bn: cmd.append("--skip-bn") + if not job.sync_notes: cmd.append("--skip-notes") + if not job.sync_fiches: cmd.append("--skip-fiches") + if job.force_abs: cmd.append("--force-abs") + return cmd + + +def _build_push_cmd(job: CronJob) -> list[str]: + return [sys.executable, str(SCRIPT_PUSH)] + + +def _wait_for_run_imports(log_fp, mtime_before: float) -> tuple[bool, str, dict]: + """Après que sync_esacada.py a fini, run_imports.py tourne en sous-process + détaché. Attend que sync_last_result.json soit mis à jour, puis log les + résultats détaillés. 
Retourne (ok, summary_message, raw_result_dict).""" + log_fp.write("\n━━━ Attente run_imports (subprocess détaché) ━━━\n") + log_fp.flush() + + deadline = time.time() + RUN_IMPORTS_TIMEOUT_SEC + poll_count = 0 + while time.time() < deadline: + if RUN_IMPORTS_RESULT.exists() and RUN_IMPORTS_RESULT.stat().st_mtime > mtime_before: + break + poll_count += 1 + # Log un point tous les 30 polls (~1 min) pour montrer qu'on attend + if poll_count % 30 == 0: + elapsed = int(time.time() - (deadline - RUN_IMPORTS_TIMEOUT_SEC)) + log_fp.write(f"[poll] {elapsed}s écoulés, en attente…\n") + log_fp.flush() + time.sleep(2) + else: + log_fp.write("⚠ TIMEOUT — sync_last_result.json non mis à jour dans le délai\n") + log_fp.flush() + return False, "run_imports timeout (>15min sans résultat)", {} + + # Lire le résultat + try: + result = json.loads(RUN_IMPORTS_RESULT.read_text(encoding="utf-8")) + except Exception as e: + log_fp.write(f"⚠ Impossible de lire sync_last_result.json : {e}\n") + return False, f"sync_last_result.json illisible : {e}", {} + + res_abs = result.get("res_abs", []) or [] + res_bn = result.get("res_bn", []) or [] + res_notes = result.get("res_notes", []) or [] + res_matu = result.get("res_matu", []) or [] + errors = result.get("errors", []) or [] + ts = result.get("timestamp", "?") + + log_fp.write(f"run_imports terminé (timestamp {ts})\n") + log_fp.write(f" Absences PDF importés : {len(res_abs)}\n") + log_fp.write(f" Bulletins de notes : {len(res_bn)}\n") + log_fp.write(f" Notes d'examen : {len(res_notes)}\n") + log_fp.write(f" Notes Matu : {len(res_matu)}\n") + log_fp.write(f" Erreurs : {len(errors)}\n") + + # Détailler chaque catégorie si non vide + if res_abs: + log_fp.write("\n Détail absences :\n") + for r in res_abs: + log_fp.write(f" - {r}\n") + if res_bn: + log_fp.write("\n Détail BN :\n") + for r in res_bn: + log_fp.write(f" - {r}\n") + if res_notes: + log_fp.write("\n Détail notes d'examen :\n") + for r in res_notes: + log_fp.write(f" - {r}\n") + if 
res_matu: + log_fp.write("\n Détail Matu :\n") + for r in res_matu: + log_fp.write(f" - {r}\n") + if errors: + log_fp.write("\n ❌ ERREURS :\n") + for err in errors: + log_fp.write(f" - {err}\n") + log_fp.flush() + + if errors: + summary = ( + f"Imports : abs={len(res_abs)}, bn={len(res_bn)}, " + f"notes={len(res_notes)}, matu={len(res_matu)}, " + f"⚠ {len(errors)} erreur(s)" + ) + return False, summary, result + + summary = ( + f"Imports OK : abs={len(res_abs)}, bn={len(res_bn)}, " + f"notes={len(res_notes)}, matu={len(res_matu)}" + ) + return True, summary, result + + +# ── Run a single step ──────────────────────────────────────────────────────── + +def _run_step(cmd: list[str], log_fp, title: str) -> tuple[int, int]: + """Lance une commande, stream stdout+stderr dans log_fp. + Retourne (returncode, pid).""" + log_fp.write(f"\n━━━ {title} ━━━\n") + log_fp.write(f"$ {' '.join(cmd)}\n") + log_fp.flush() + + proc = subprocess.Popen( + cmd, + stdout=log_fp, + stderr=subprocess.STDOUT, + env={**os.environ, "PYTHONUNBUFFERED": "1"}, + start_new_session=True, + ) + pid = proc.pid + rc = proc.wait() + log_fp.write(f"\n[exit code = {rc}]\n") + log_fp.flush() + return rc, pid + + +# ── Run a job (full lifecycle) ─────────────────────────────────────────────── + +def run_job(job: CronJob, sess) -> None: + """Exécute un job. Met à jour son état en DB et envoie notification.""" + started = datetime.now() + ts = started.strftime("%Y%m%d-%H%M%S") + log_path = LOG_DIR / f"job_{job.id}_{ts}.log" + + # Politique "kill" : si déjà running (selon DB) et PID alive, on kill avant. 
+ if job.last_status == "running" and _pid_alive(job.last_pid): + _kill_pid(job.last_pid or 0) + # Trace dans le log + with log_path.open("w", encoding="utf-8") as fp: + fp.write(f"[{started}] PID précédent {job.last_pid} kill -9 (politique kill)\n") + + # Marquer running + job.last_run_at = started + job.last_status = "running" + job.last_message = "" + job.last_log_path = str(log_path) + job.last_pid = None + sess.commit() + + overall_rc = 0 + final_msg = "" + last_pid: int | None = None + imports_result: dict = {} + + try: + with log_path.open("a", encoding="utf-8") as fp: + fp.write(f"\n=== Job #{job.id} '{job.name}' — démarré {started.isoformat(timespec='seconds')} ===\n") + fp.write(f"task_kind={job.task_kind} classes={job.classes_json}\n") + + steps: list[tuple[str, list[str]]] = [] + if job.task_kind == "push": + steps = [("Push Escada", _build_push_cmd(job))] + elif job.task_kind == "sync": + steps = [("Sync Escada", _build_sync_cmd(job))] + elif job.task_kind == "push_then_sync": + steps = [ + ("Push Escada", _build_push_cmd(job)), + ("Sync Escada", _build_sync_cmd(job)), + ] + else: + fp.write(f"[error] task_kind inconnu : {job.task_kind}\n") + overall_rc = 99 + final_msg = f"task_kind invalide : {job.task_kind}" + + for title, cmd in steps: + # Capturer mtime du marqueur run_imports AVANT le sync + # (utilisé après pour détecter la fin de run_imports.py) + is_sync = title.startswith("Sync") + mtime_before = ( + RUN_IMPORTS_RESULT.stat().st_mtime + if is_sync and RUN_IMPORTS_RESULT.exists() else 0.0 + ) + + rc, pid = _run_step(cmd, fp, title) + last_pid = pid + if rc != 0: + overall_rc = rc + final_msg = f"{title} a échoué (code {rc})" + break + + # Si c'était une étape sync, attendre que run_imports termine + if is_sync: + imports_ok, imports_msg, imports_result = _wait_for_run_imports(fp, mtime_before) + if not imports_ok: + overall_rc = 2 + final_msg = imports_msg + break + # On garde le message du sub pour la notif finale + final_msg = imports_msg + 
+ if overall_rc == 0 and not final_msg: + final_msg = f"{len(steps)} étape(s) OK" + + except Exception as e: + overall_rc = 1 + final_msg = f"Exception : {e}" + try: + with log_path.open("a", encoding="utf-8") as fp: + import traceback + fp.write("\n[fatal exception]\n") + fp.write(traceback.format_exc()) + except Exception: + pass + + # État final en DB + finished = datetime.now() + duration = (finished - started).total_seconds() + job.last_status = "ok" if overall_rc == 0 else "fail" + job.last_message = final_msg + job.last_pid = last_pid + sess.commit() + + # Notification + try: + notify_job_result( + job_name=job.name, + status=job.last_status, + message=final_msg, + log_path=log_path, + chat_id=job.notify_chat_id or None, + notify_on=job.notify_on, + notify_level=getattr(job, "notify_level", "normal"), + duration_s=duration, + details=imports_result, + job_options={ + "task_kind": job.task_kind, + "sync_abs": job.sync_abs, + "sync_bn": job.sync_bn, + "sync_notes": job.sync_notes, + "sync_fiches": job.sync_fiches, + }, + ) + except Exception as e: + # Ne fait pas échouer le job pour une notif KO + try: + with log_path.open("a", encoding="utf-8") as fp: + fp.write(f"\n[notify] échec envoi : {e}\n") + except Exception: + pass + + +# ── Main loop ──────────────────────────────────────────────────────────────── + +def main() -> int: + parser = argparse.ArgumentParser(description="Cron tick — exécute les CronJob dûs.") + parser.add_argument("--dry-run", action="store_true", + help="Liste les jobs dûs sans les exécuter.") + parser.add_argument("--job", type=int, default=None, + help="Force l'exécution d'un job par son id (ignore schedule).") + args = parser.parse_args() + + # Garantir que la table existe + try: + init_db() + except Exception as e: + print(f"[cron_tick] init_db error : {e}", file=sys.stderr) + return 1 + + sess = get_session() + now = datetime.now() + + try: + if args.job is not None: + job = sess.get(CronJob, args.job) + if job is None: + 
print(f"[cron_tick] job id={args.job} introuvable") + return 1 + print(f"[cron_tick] forçage job #{job.id} '{job.name}'") + if args.dry_run: + return 0 + run_job(job, sess) + return 0 + + from sqlalchemy import select as _sel + jobs = sess.execute(_sel(CronJob).where(CronJob.enabled == True)).scalars().all() # noqa: E712 + + due_jobs = [j for j in jobs if _is_due(j, now)] + if not due_jobs: + return 0 # rien à faire + + print(f"[cron_tick] {now.isoformat(timespec='seconds')} — {len(due_jobs)} job(s) dûs") + for job in due_jobs: + print(f" - #{job.id} '{job.name}' kind={job.task_kind} schedule={job.schedule_kind}:{job.schedule_value}") + if args.dry_run: + continue + run_job(job, sess) + + return 0 + finally: + sess.close() + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/run_imports.py b/scripts/run_imports.py index 00c3e7d..f484377 100755 --- a/scripts/run_imports.py +++ b/scripts/run_imports.py @@ -51,7 +51,16 @@ for pdf_path in abs_pdfs: detail = f"{r.nb_absences_nouvelles} nouvelles" if r.nb_absences_mises_a_jour: detail += f", {r.nb_absences_mises_a_jour} maj" - res_abs.append({"classe": r.classe, "detail": detail}) + if r.nb_absences_pending_skipped: + detail += f", {r.nb_absences_pending_skipped} pending" + res_abs.append({ + "classe": r.classe, + "detail": detail, + "nouvelles": r.nb_absences_nouvelles, + "mises_a_jour": r.nb_absences_mises_a_jour, + "pending_skipped": r.nb_absences_pending_skipped, + "doublons": r.nb_absences_doublons, + }) app_log(f"[run_imports] abs {r.classe}: {detail}") except Exception as e: errors.append(f"Import abs {Path(pdf_path).name}: {e}") diff --git a/src/db.py b/src/db.py index b80980d..ca89f60 100644 --- a/src/db.py +++ b/src/db.py @@ -218,6 +218,52 @@ class SanctionExport(Base): apprenti: Mapped["Apprenti"] = relationship() +class CronJob(Base): + """Tâche planifiée (cron) pour pull/push Escada automatique.""" + __tablename__ = "cron_jobs" + + id: Mapped[int] = mapped_column(primary_key=True) + name: 
Mapped[str] + enabled: Mapped[bool] = mapped_column(default=True) + + # schedule_kind ∈ {"daily", "weekly", "interval"} + # daily : schedule_value="HH:MM" + # weekly : schedule_value="MON,TUE,WED,THU,FRI:HH:MM" + # interval: schedule_value="60" (minutes) + schedule_kind: Mapped[str] = mapped_column(default="daily") + schedule_value: Mapped[str] = mapped_column(default="03:00") + + # task_kind ∈ {"push", "sync", "push_then_sync"} + task_kind: Mapped[str] = mapped_column(default="push_then_sync") + + # Sous-options pour task sync + sync_abs: Mapped[bool] = mapped_column(default=True) + sync_bn: Mapped[bool] = mapped_column(default=True) + sync_notes: Mapped[bool] = mapped_column(default=True) + sync_fiches: Mapped[bool] = mapped_column(default=False) + force_abs: Mapped[bool] = mapped_column(default=False) + + # Liste de classes en JSON, ou "ALL" pour toutes + classes_json: Mapped[str] = mapped_column(default="ALL") + + # Notifications + # notify_on ∈ {"never", "always", "success", "failure"} + notify_on: Mapped[str] = mapped_column(default="failure") + # notify_level ∈ {"normal", "detailed"} + notify_level: Mapped[str] = mapped_column(default="normal") + notify_chat_id: Mapped[str] = mapped_column(default="") # override config global + + # État de la dernière exécution + last_run_at: Mapped[Optional[datetime]] = mapped_column(nullable=True) + last_status: Mapped[str] = mapped_column(default="") # "ok"|"fail"|"running"|"" + last_message: Mapped[str] = mapped_column(Text, default="") + last_log_path: Mapped[str] = mapped_column(default="") + last_pid: Mapped[Optional[int]] = mapped_column(nullable=True) + + created_at: Mapped[datetime] = mapped_column(default=datetime.now) + updated_at: Mapped[datetime] = mapped_column(default=datetime.now) + + def get_engine(db_url: str | None = None): url = db_url or f"sqlite:///{DB_PATH}" from sqlalchemy import event as _sa_event @@ -239,6 +285,7 @@ def init_db(engine=None): with engine.connect() as _conn: for stmt in ( "ALTER 
TABLE sanctions_export ADD COLUMN nb_absences INTEGER", + "ALTER TABLE cron_jobs ADD COLUMN notify_level TEXT DEFAULT 'normal'", """CREATE TABLE IF NOT EXISTS escada_pending ( id INTEGER PRIMARY KEY, apprenti_id INTEGER NOT NULL REFERENCES apprentis(id), diff --git a/src/importer.py b/src/importer.py index 8e44332..a37501c 100644 --- a/src/importer.py +++ b/src/importer.py @@ -24,6 +24,7 @@ class ImportResult: nb_absences_doublons: int nb_absences_mises_a_jour: int = 0 nb_absences_supprimees: int = 0 + nb_absences_pending_skipped: int = 0 # absences non modifiées car pending vers Escada details_nouvelles: list[str] = field(default_factory=list) details_mises_a_jour: list[str] = field(default_factory=list) @@ -51,6 +52,7 @@ def import_pdf( nb_doublons = 0 nb_mises_a_jour = 0 nb_supprimees = 0 + nb_pending_skipped = 0 # Détails par apprenti : {apprenti_id: {"nom": str, "prenom": str, "dates": [str]}} _nouv_by_ap: dict[int, dict] = {} @@ -113,6 +115,7 @@ def import_pdf( elif ep_pending: # Modification en attente de sync vers Escada → ne pas écraser nb_doublons += 1 + nb_pending_skipped += 1 elif existe.type_origine != ab["type_absence"]: existe.type_origine = ab["type_absence"] existe.statut = "excusee" if ab["type_absence"] == "E" else "a_traiter" @@ -188,6 +191,7 @@ def import_pdf( nb_absences_doublons=nb_doublons, nb_absences_mises_a_jour=nb_mises_a_jour, nb_absences_supprimees=nb_supprimees, + nb_absences_pending_skipped=nb_pending_skipped, details_nouvelles=[_fmt(d) for d in _nouv_by_ap.values()], details_mises_a_jour=[_fmt(d) for d in _upd_by_ap.values()], ) diff --git a/src/notifier.py b/src/notifier.py new file mode 100644 index 0000000..4b78ea7 --- /dev/null +++ b/src/notifier.py @@ -0,0 +1,217 @@ +"""Notifier — envoi de notifications via bot Telegram. 
+ +Configuration globale (environment variables, lue depuis .env.prod via docker-compose) : +- TELEGRAM_BOT_TOKEN : token du bot (obtenu via @BotFather) +- TELEGRAM_CHAT_ID : chat id par défaut (obtenu via getUpdates ou @userinfobot) + +Override par job possible via le champ CronJob.notify_chat_id. +""" + +from __future__ import annotations + +import json +import os +import urllib.error +import urllib.parse +import urllib.request +from pathlib import Path + + +_TG_API = "https://api.telegram.org" + + +def _bot_token() -> str: + return os.getenv("TELEGRAM_BOT_TOKEN", "").strip() + + +def _default_chat_id() -> str: + return os.getenv("TELEGRAM_CHAT_ID", "").strip() + + +def send_telegram(text: str, chat_id: str | None = None, *, parse_mode: str = "HTML") -> bool: + """Envoie un message Telegram. Retourne True si succès, False sinon. + + parse_mode="HTML" supporte , , ,
.
+    Tronque automatiquement le texte pour rester sous la limite Telegram de 4096 caractères.
+    """
+    token = _bot_token()
+    chat = (chat_id or "").strip() or _default_chat_id()
+    if not token or not chat:
+        return False
+
+    text = text[:4090] + "\n…" if len(text) > 4096 else text
+    url = f"{_TG_API}/bot{token}/sendMessage"
+    data = urllib.parse.urlencode({
+        "chat_id": chat,
+        "text": text,
+        "parse_mode": parse_mode,
+        "disable_web_page_preview": "true",
+    }).encode()
+
+    try:
+        req = urllib.request.Request(url, data=data, method="POST")
+        with urllib.request.urlopen(req, timeout=15) as r:
+            payload = json.loads(r.read())
+            return bool(payload.get("ok"))
+    except urllib.error.HTTPError as e:
+        try:
+            err_body = e.read().decode()
+        except Exception:
+            err_body = str(e)
+        print(f"[notifier] Telegram HTTPError {e.code}: {err_body[:200]}")
+        return False
+    except Exception as e:
+        print(f"[notifier] Telegram error: {e}")
+        return False
+
+
+def notify_job_result(
+    job_name: str,
+    status: str,
+    message: str,
+    log_path: Path | str | None = None,
+    chat_id: str | None = None,
+    notify_on: str = "failure",
+    notify_level: str = "normal",
+    duration_s: float | None = None,
+    details: dict | None = None,
+    job_options: dict | None = None,
+) -> bool:
+    """Envoie une notif Telegram selon le statut et la politique notify_on.
+
+    Args:
+        job_name: nom du cron job
+        status: "ok" | "fail"
+        message: message court (1-2 lignes)
+        log_path: chemin du fichier log du job ; actuellement jamais inclus dans le message (ni en "normal" ni en "detailed")
+        chat_id: override du chat id (sinon TELEGRAM_CHAT_ID)
+        notify_on: "never" | "always" | "success" | "failure"
+        notify_level: "normal" (nom + statut + durée) ou "detailed" (+ détails import)
+        duration_s: durée d'exécution en secondes
+        details: dict avec clés optionnelles depuis sync_last_result.json :
+            - res_abs: list[dict avec classe, nouvelles, mises_a_jour, pending_skipped]
+            - res_bn:  list[dict avec classe, nb]
+            - res_notes: list[dict avec classe, nb]
+            - res_matu: list[dict avec classe, nb]
+            - errors: list[str]
+        job_options: dict avec options du job pour savoir ce qui était sélectionné :
+            - sync_bn, sync_notes, sync_fiches, etc. (booléens)
+    """
+    if notify_on == "never":
+        return False
+    if notify_on == "success" and status != "ok":
+        return False
+    if notify_on == "failure" and status == "ok":
+        return False
+
+    icon = "✅" if status == "ok" else "❌"
+    title = "Réussi" if status == "ok" else "Échec"
+    parts = [
+        f"{icon} {_escape_html(job_name)} — {title}",
+    ]
+    if duration_s is not None:
+        parts.append(f"⏱ Durée : {_fmt_duration(duration_s)}")
+
+    # Niveau normal — message court uniquement
+    if notify_level != "detailed":
+        if message and status != "ok":
+            # En cas d'échec, on garde le message d'erreur même en normal
+            msg = message.strip()
+            if len(msg) > 500:
+                msg = msg[:500] + "…"
+            parts.append(f"
{_escape_html(msg)}
") + return send_telegram("\n".join(parts), chat_id=chat_id) + + # Niveau detailed — détails par classe et catégorie + job_options = job_options or {} + details = details or {} + + # Erreurs en premier si présentes + errors = details.get("errors") or [] + if errors: + parts.append("\n⚠ Erreurs") + for err in errors[:10]: + parts.append(f" • {_escape_html(str(err)[:200])}") + if len(errors) > 10: + parts.append(f" … +{len(errors) - 10} autre(s)") + + # Absences (toujours affichées si présentes) + res_abs = details.get("res_abs") or [] + if res_abs: + parts.append("\n📋 Absences") + for r in res_abs: + classe = r.get("classe", "?") + nouv = int(r.get("nouvelles", 0) or 0) + maj = int(r.get("mises_a_jour", 0) or 0) + pend = int(r.get("pending_skipped", 0) or 0) + parts.append( + f" • {_escape_html(classe)} : " + f"{nouv} nouv. · {maj} modif. · {pend} pending" + ) + + # BN (seulement si sync_bn coché) + if job_options.get("sync_bn"): + res_bn = details.get("res_bn") or [] + parts.append("\n📊 Bulletins") + if res_bn: + for r in res_bn: + parts.append( + f" • {_escape_html(r.get('classe', '?'))} : {r.get('nb', '?')} apprenti(s)" + ) + else: + parts.append(" Aucun import") + + # Notes d'examen (seulement si sync_notes coché) + if job_options.get("sync_notes"): + res_notes = details.get("res_notes") or [] + parts.append("\n📝 Notes d'examen") + if res_notes: + for r in res_notes: + parts.append( + f" • {_escape_html(r.get('classe', '?'))} : {r.get('nb', '?')} apprenti(s)" + ) + else: + parts.append(" Aucun import") + + # Notes Matu (seulement si BN coché — Matu est lié aux apprentis BN) + if job_options.get("sync_bn"): + res_matu = details.get("res_matu") or [] + if res_matu: + parts.append("\n🎓 Matu") + for r in res_matu[:8]: # cap à 8 pour pas exploser le message + parts.append( + f" • {_escape_html(r.get('classe', '?'))} : {r.get('nb', '?')} apprenti(s)" + ) + if len(res_matu) > 8: + parts.append(f" … +{len(res_matu) - 8} classe(s)") + + return 
send_telegram("\n".join(parts), chat_id=chat_id) + + +def _fmt_duration(seconds: float) -> str: + s = int(seconds) + if s < 60: + return f"{s}s" + if s < 3600: + return f"{s // 60}min {s % 60}s" + return f"{s // 3600}h {(s % 3600) // 60}min" + + +def _escape_html(s: str) -> str: + return ( + s.replace("&", "&") + .replace("<", "<") + .replace(">", ">") + ) + + +def test_telegram() -> tuple[bool, str]: + """Test rapide : envoie un message de ping. Retourne (ok, message).""" + if not _bot_token(): + return False, "TELEGRAM_BOT_TOKEN non configuré dans l'environnement" + if not _default_chat_id(): + return False, "TELEGRAM_CHAT_ID non configuré dans l'environnement" + ok = send_telegram("✓ EPTM Dashboard\nTest de notification — tout est OK.") + if ok: + return True, "Message envoyé" + return False, "Échec de l'envoi (voir logs container)"