eptm_dashboard/scripts/run_imports.py
Julien Balet f60cbf1b1c sync escada : gestion fine des pendings + détection orphelines
- importer.py : nouvelle logique pour les 4 cas d'absence × pending :
  * abs en PDF + pending modify : pending wins (sans force) / override (force)
  * abs en PDF + pas en DB + pending action=clear : respecte la suppression
    locale (sans force) / recrée l'abs (force)
  * orpheline (DB sans PDF) sans pending : supprimée + comptée + détaillée
  * orpheline avec pending : conservée (sans force) / supprimée (force)
- importer.py : query orpheline par classe + fenêtre de dates du PDF
  (couvre les abs locales avec import_id=None)
- run_imports.py : remonte orphelines + pending_skipped dans res_abs
- notifier.py : niveau detailed inclut absences supprimées par classe
  + détail des orphelines (max 5 par classe)
- escada.py : sépare cache disque (toutes classes pour matching Matu)
  vs liste UI (filtrée MP/MI/Formation)
- escada.py : timeout polling import passe de 60s à 15min
- escada.py : retire mode test push, fix bouton Actualiser bloqué sans
  classe sélectionnée
- cron.py : reset last_run_at à l'activation d'un job pour relance
  immédiate au prochain tick

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-10 15:24:43 +02:00

169 lines
6 KiB
Python
Executable file

#!/usr/bin/env python3
"""
Read sync_all_done.json, run the PDF → DB imports, write sync_last_result.json.

Launched as a detached subprocess by reset_sync (start_new_session=True).
Args: <data_dir> <username> <force_abs:0|1>
"""
import json
import sys
from datetime import datetime
from pathlib import Path

# Make the project root importable when the script runs standalone.
_ROOT = Path(__file__).resolve().parent.parent
sys.path.insert(0, str(_ROOT))

# Positional CLI arguments, each with a fallback default.
DATA_DIR = Path(sys.argv[1]) if len(sys.argv) > 1 else _ROOT / "data"
USERNAME = sys.argv[2] if len(sys.argv) > 2 else "escada"
FORCE_ABS = sys.argv[3] == "1" if len(sys.argv) > 3 else False

ALL_DONE = DATA_DIR / "sync_all_done.json"      # input: payload written by the sync step
RESULT = DATA_DIR / "sync_last_result.json"     # output: summary read back by the caller

from src.logger import app_log  # noqa: E402 — must come after the sys.path tweak

app_log("[run_imports] démarré")
# Load the payload produced by the sync step; abort the subprocess if it is
# missing or unreadable (exit code 1 signals failure to the parent).
try:
    _all_done = json.loads(ALL_DONE.read_text(encoding="utf-8"))
    payload = _all_done.get("payload", {})
except Exception as e:
    app_log(f"[run_imports] ERREUR lecture all_done: {e}")
    sys.exit(1)

# Project imports happen only after the payload read succeeded.
from src.db import get_session
from src.importer import import_pdf as do_import
from src.importer_bn import import_bn as do_import_bn
from src.importer_matu import import_matu as do_import_matu
from src.importer_notes import import_notes_pdf
from src.db import Apprenti, EscadaPending, upsert_apprenti_fiche, _norm_prenom
from sqlalchemy import select

# Payload sections; every key is optional and defaults to empty.
abs_pdfs = payload.get("abs", [])
bn_pdfs = payload.get("bn", [])
matu_pdfs = payload.get("matu", [])
notes_pdfs = payload.get("notes", [])
fiches = payload.get("fiches", {})
errors = list(payload.get("errors", []))
# --- Absence PDFs ------------------------------------------------------------
# One DB session per PDF so a failing file never affects the next import.
res_abs = []
for abs_path in abs_pdfs:
    sess = get_session()
    try:
        imp = do_import(Path(abs_path), sess, imported_by=USERNAME, force=FORCE_ABS)
        # Human-readable summary: always the "new" count, plus optional extras.
        parts = [f"{imp.nb_absences_nouvelles} nouvelles"]
        if imp.nb_absences_mises_a_jour:
            parts.append(f"{imp.nb_absences_mises_a_jour} maj")
        if imp.nb_absences_pending_skipped:
            parts.append(f"{imp.nb_absences_pending_skipped} pending")
        if imp.nb_absences_supprimees:
            parts.append(f"{imp.nb_absences_supprimees} orphelines")
        detail = ", ".join(parts)
        res_abs.append({
            "classe": imp.classe,
            "detail": detail,
            "nouvelles": imp.nb_absences_nouvelles,
            "mises_a_jour": imp.nb_absences_mises_a_jour,
            "pending_skipped": imp.nb_absences_pending_skipped,
            "doublons": imp.nb_absences_doublons,
            "orphelines": imp.nb_absences_supprimees,
            "pendings_orphelins": imp.nb_pendings_orphelins_supprimes,
            "details_orphelines": imp.details_orphelines,
        })
        app_log(f"[run_imports] abs {imp.classe}: {detail}")
        # Log each deleted orphan absence individually for traceability.
        for d in imp.details_orphelines or []:
            app_log(f"[run_imports] orpheline supprimée : {imp.classe} | {d}")
    except Exception as e:
        errors.append(f"Import abs {Path(abs_path).name}: {e}")
        app_log(f"[run_imports] erreur abs: {e}")
    finally:
        sess.close()
# --- BN PDFs -----------------------------------------------------------------
# Same pattern as the absence loop: fresh session per file, errors collected.
res_bn = []
for bn_path in bn_pdfs:
    sess = get_session()
    try:
        imp = do_import_bn(Path(bn_path), sess, imported_by=USERNAME)
        res_bn.append({"classe": imp.classe, "nb": str(imp.nb_apprentis)})
        app_log(f"[run_imports] BN {imp.classe}: {imp.nb_apprentis}")
    except Exception as e:
        errors.append(f"Import BN {Path(bn_path).name}: {e}")
        app_log(f"[run_imports] erreur BN: {e}")
    finally:
        sess.close()
# --- Notes PDFs --------------------------------------------------------------
# import_notes_pdf returns a plain dict (not a result object like the others).
res_notes = []
for notes_path in notes_pdfs:
    sess = get_session()
    try:
        info = import_notes_pdf(Path(notes_path), sess)
        res_notes.append({"classe": info["classe"], "nb": str(info["nb"])})
        app_log(f"[run_imports] notes {info['classe']}: {info['nb']} apprenti(s)")
    except Exception as e:
        errors.append(f"Import notes {Path(notes_path).name}: {e}")
        app_log(f"[run_imports] erreur notes: {e}")
    finally:
        sess.close()
# --- Matu PDFs ---------------------------------------------------------------
res_matu = []
for matu_path in matu_pdfs:
    sess = get_session()
    try:
        # do_import_matu also returns the names it could not match to a record.
        imp, unmatched = do_import_matu(Path(matu_path), sess, imported_by=USERNAME)
        entry = {
            # Fall back to the file stem when no MP class was detected.
            "classe": imp.classe_mp or Path(matu_path).stem,
            "nb": str(imp.nb_apprentis),
            "unmatched": ", ".join(unmatched) if unmatched else "",
        }
        res_matu.append(entry)
        app_log(f"[run_imports] matu {imp.classe_mp}: {imp.nb_apprentis}")
    except Exception as e:
        errors.append(f"Import matu {Path(matu_path).name}: {e}")
        app_log(f"[run_imports] erreur matu: {e}")
    finally:
        sess.close()
# --- Fiches (per-class student sheets) --------------------------------------
# Match each fiche to an Apprenti row by normalized name, then upsert it.
# One session and one commit cover the whole batch; rollback on any failure.
if fiches:
    sess = get_session()
    try:
        for cls, fiches_list in fiches.items():
            cls_apprentis = sess.execute(
                select(Apprenti).where(Apprenti.classe == cls)
            ).scalars().all()
            # Hoisted out of the per-fiche loop: normalize each apprenti's
            # full name once per class instead of once per (fiche, apprenti)
            # pair — same matches, O(A) normalizations instead of O(F×A).
            norm_apprentis = [
                (_norm_prenom(f"{a.nom} {a.prenom}"), a) for a in cls_apprentis
            ]
            for fiche in fiches_list:
                nom_eleve = fiche.get("nom_eleve", "")
                if not nom_eleve:
                    continue  # fiche without a usable name: skip silently
                nom_norm = _norm_prenom(nom_eleve)
                ap = None
                for full_norm, a in norm_apprentis:
                    # Exact match, or the fiche name is a word-boundary prefix
                    # of the DB full name (presumably extra given names in the
                    # DB record — NOTE(review): confirm against fiche source).
                    if full_norm == nom_norm or full_norm.startswith(nom_norm + " "):
                        ap = a
                        break
                if ap:
                    upsert_apprenti_fiche(sess, ap.id, fiche)
        sess.commit()
        app_log(f"[run_imports] fiches: {sum(len(v) for v in fiches.values())} entrées")
    except Exception as e:
        errors.append(f"Import fiches: {e}")
        sess.rollback()
        app_log(f"[run_imports] erreur fiches: {e}")
    finally:
        sess.close()
# --- Persist the aggregated result ------------------------------------------
# Written as one JSON document; the parent process polls this file.
summary = {
    "timestamp": datetime.now().isoformat(),
    "res_abs": res_abs,
    "res_bn": res_bn,
    "res_notes": res_notes,
    "res_matu": res_matu,
    "errors": errors,
    "op_log": "",
}
try:
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    RESULT.write_text(json.dumps(summary, ensure_ascii=False), encoding="utf-8")
    app_log(f"[run_imports] résultats sauvegardés — notes={len(res_notes)} abs={len(res_abs)} erreurs={len(errors)}")
except Exception as e:
    # Losing the result file makes the run invisible to the caller: fail loudly.
    app_log(f"[run_imports] ERREUR sauvegarde: {e}")
    sys.exit(1)
app_log("[run_imports] terminé OK")