#!/usr/bin/env python3
"""Read sync_all_done.json, run the PDF → DB imports, write sync_last_result.json.

Launched as a detached subprocess by reset_sync (start_new_session=True).

Args: <data_dir> <username> <force_abs:0|1>
"""

import json
import sys
from datetime import datetime
from pathlib import Path

# Project root = parent of this script's directory; put it on sys.path so
# the "src" package resolves when launched as a bare subprocess.
_ROOT = Path(__file__).resolve().parent.parent
sys.path.insert(0, str(_ROOT))

# CLI arguments, each with a fallback for manual invocation.
DATA_DIR = Path(sys.argv[1]) if len(sys.argv) > 1 else _ROOT / "data"
USERNAME = sys.argv[2] if len(sys.argv) > 2 else "escada"
FORCE_ABS = len(sys.argv) > 3 and sys.argv[3] == "1"

ALL_DONE = DATA_DIR / "sync_all_done.json"      # input hand-off from the sync step
RESULT = DATA_DIR / "sync_last_result.json"     # output summary written at the end

from src.logger import app_log  # noqa: E402 — requires the sys.path tweak above

app_log("[run_imports] démarré")
# Load the hand-off file produced by the sync step; abort if unreadable,
# since there is nothing to import without it.
try:
    content = ALL_DONE.read_text(encoding="utf-8")
    payload = json.loads(content).get("payload", {})
except Exception as e:
    app_log(f"[run_imports] ERREUR lecture all_done: {e}")
    sys.exit(1)
from sqlalchemy import select

# NOTE(review): EscadaPending appears unused in this script — confirm before removing.
from src.db import (
    Apprenti,
    EscadaPending,
    _norm_prenom,
    get_session,
    upsert_apprenti_fiche,
)
from src.importer import import_pdf as do_import
from src.importer_bn import import_bn as do_import_bn
from src.importer_matu import import_matu as do_import_matu
from src.importer_notes import import_notes_pdf

# Work lists collected by the sync step, one per import kind.
abs_pdfs = payload.get("abs", [])
bn_pdfs = payload.get("bn", [])
matu_pdfs = payload.get("matu", [])
notes_pdfs = payload.get("notes", [])
fiches = payload.get("fiches", {})
# Local copy: import errors below are appended to the ones the sync reported.
errors = list(payload.get("errors", []))
# --- Absence PDFs -------------------------------------------------------
# One session per PDF so a failing file cannot poison the next import.
res_abs = []
for pdf_path in abs_pdfs:
    sess = get_session()
    try:
        r = do_import(Path(pdf_path), sess, imported_by=USERNAME, force=FORCE_ABS)
        detail = f"{r.nb_absences_nouvelles} nouvelles"
        if r.nb_absences_mises_a_jour:
            detail += f", {r.nb_absences_mises_a_jour} maj"
        res_abs.append({"classe": r.classe, "detail": detail})
        app_log(f"[run_imports] abs {r.classe}: {detail}")
    except Exception as e:
        # Explicitly discard partial work (consistent with the fiches handler
        # below) instead of relying on close() to roll back implicitly.
        sess.rollback()
        errors.append(f"Import abs {Path(pdf_path).name}: {e}")
        app_log(f"[run_imports] erreur abs: {e}")
    finally:
        sess.close()
# --- BN PDFs ------------------------------------------------------------
# One session per PDF; failures are logged and collected, never fatal.
res_bn = []
for pdf_path in bn_pdfs:
    sess = get_session()
    try:
        r = do_import_bn(Path(pdf_path), sess, imported_by=USERNAME)
        res_bn.append({"classe": r.classe, "nb": str(r.nb_apprentis)})
        app_log(f"[run_imports] BN {r.classe}: {r.nb_apprentis}")
    except Exception as e:
        # Explicit rollback, consistent with the fiches handler below.
        sess.rollback()
        errors.append(f"Import BN {Path(pdf_path).name}: {e}")
        app_log(f"[run_imports] erreur BN: {e}")
    finally:
        sess.close()
# --- Grade ("notes") PDFs ----------------------------------------------
# import_notes_pdf returns a dict with "classe" and "nb" keys.
res_notes = []
for pdf_path in notes_pdfs:
    sess = get_session()
    try:
        r = import_notes_pdf(Path(pdf_path), sess)
        res_notes.append({"classe": r["classe"], "nb": str(r["nb"])})
        app_log(f"[run_imports] notes {r['classe']}: {r['nb']} apprenti(s)")
    except Exception as e:
        # Explicit rollback, consistent with the fiches handler below.
        sess.rollback()
        errors.append(f"Import notes {Path(pdf_path).name}: {e}")
        app_log(f"[run_imports] erreur notes: {e}")
    finally:
        sess.close()
# --- Matu PDFs ----------------------------------------------------------
# do_import_matu also reports names it could not match to an apprentice.
res_matu = []
for pdf_path in matu_pdfs:
    sess = get_session()
    try:
        r, unmatched = do_import_matu(Path(pdf_path), sess, imported_by=USERNAME)
        res_matu.append({
            # Fall back to the file stem when the class could not be read.
            "classe": r.classe_mp or Path(pdf_path).stem,
            "nb": str(r.nb_apprentis),
            "unmatched": ", ".join(unmatched) if unmatched else "",
        })
        app_log(f"[run_imports] matu {r.classe_mp}: {r.nb_apprentis}")
    except Exception as e:
        # Explicit rollback, consistent with the fiches handler below.
        sess.rollback()
        errors.append(f"Import matu {Path(pdf_path).name}: {e}")
        app_log(f"[run_imports] erreur matu: {e}")
    finally:
        sess.close()
# --- Student sheets ("fiches"): matched to apprentices by normalized name
if fiches:
    sess = get_session()
    try:
        for cls, sheet_list in fiches.items():
            # Fetch the class roster once, then match each sheet against it.
            candidates = sess.execute(
                select(Apprenti).where(Apprenti.classe == cls)
            ).scalars().all()
            for sheet in sheet_list:
                target = sheet.get("nom_eleve", "")
                if not target:
                    continue
                wanted = _norm_prenom(target)
                matched = None
                for cand in candidates:
                    norm = _norm_prenom(f"{cand.nom} {cand.prenom}")
                    # Accept an exact match or a word-boundary prefix match
                    # on the normalized "NOM PRENOM" string.
                    if norm == wanted or norm.startswith(wanted + " "):
                        matched = cand
                        break
                if matched is not None:
                    upsert_apprenti_fiche(sess, matched.id, sheet)
        # Single commit for all classes; any failure rolls everything back.
        sess.commit()
        app_log(f"[run_imports] fiches: {sum(len(v) for v in fiches.values())} entrées")
    except Exception as e:
        errors.append(f"Import fiches: {e}")
        sess.rollback()
        app_log(f"[run_imports] erreur fiches: {e}")
    finally:
        sess.close()
# Summary payload later rendered by the UI from sync_last_result.json.
result = dict(
    timestamp=datetime.now().isoformat(),
    res_abs=res_abs,
    res_bn=res_bn,
    res_notes=res_notes,
    res_matu=res_matu,
    errors=errors,
    op_log="",
)
# Persist the summary; a save failure is fatal (exit 1) so the parent
# process can detect that no result file was produced.
try:
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(result, ensure_ascii=False)
    RESULT.write_text(serialized, encoding="utf-8")
    app_log(f"[run_imports] résultats sauvegardés — notes={len(res_notes)} abs={len(res_abs)} erreurs={len(errors)}")
except Exception as e:
    app_log(f"[run_imports] ERREUR sauvegarde: {e}")
    sys.exit(1)

app_log("[run_imports] terminé OK")