import escada ok

This commit is contained in:
Julien Balet 2026-05-09 23:27:46 +02:00
parent 0182188de5
commit 096dfd727b
5 changed files with 2553 additions and 0 deletions

467
scripts/push_to_escada.py Normal file
View file

@ -0,0 +1,467 @@
"""Pousse vers Escada les changements de statut effectués dans l'app.
Usage :
python scripts/push_to_escada.py # tous les changements en attente
python scripts/push_to_escada.py --test # test limité à Poidevin Alexandre / EM-AU 1
python scripts/push_to_escada.py --count # affiche le nombre de changements en attente
python scripts/push_to_escada.py --no-pull # ne pas récupérer le serveur avant push
"""
from __future__ import annotations
import json
import subprocess
import sys
from datetime import date
from pathlib import Path
_root = Path(__file__).resolve().parent.parent
if str(_root) not in sys.path:
sys.path.insert(0, str(_root))
if hasattr(sys.stdout, "reconfigure"):
sys.stdout.reconfigure(encoding="utf-8", errors="replace")
if hasattr(sys.stderr, "reconfigure"):
sys.stderr.reconfigure(encoding="utf-8", errors="replace")
from playwright.sync_api import sync_playwright, TimeoutError as PWTimeout
from src.db import Absence, Apprenti, EscadaPending, get_engine, init_db, upsert_escada_pending
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy import select
# Réutilise les utilitaires de navigation depuis sync_esacada
from scripts.sync_esacada import (
BASE_URL, CLASSES_URL, PROFILE_DIR,
_log, _ensure_logged_in, _launch_context,
_go_to_absence_page, _cache_load,
)
# ── Coordonnées du serveur ────────────────────────────────────────────────────
_SSH_HOST = "julbal@20.199.136.37"
_SSH_REMOTE = "/opt/absences"
# ── Interaction avec la page d'absences ───────────────────────────────────────
_JS_SET_DROPDOWN = """([nom, prenom, idx, val]) => {
for (const tr of document.querySelectorAll('tr')) {
// Vérifier que nom et prénom apparaissent dans des <td> directs courts
// (les lignes-containers du grid DevExpress ont des <td> très longs
// contenant tous les élèves on les exclut via la limite 200 chars)
const directTds = Array.from(tr.querySelectorAll(':scope > td'));
if (!directTds.length) continue;
const hasNom = directTds.some(td => {
const t = (td.innerText || td.textContent || '').trim();
return t.includes(nom) && t.length < 200;
});
const hasPrenom = directTds.some(td => {
const t = (td.innerText || td.textContent || '').trim();
return t.includes(prenom) && t.length < 200;
});
if (!hasNom || !hasPrenom) continue;
const sels = Array.from(tr.querySelectorAll('select'));
// Exclure les containers (trop de selects = plusieurs élèves fusionnés)
if (sels.length > 25) continue;
if (sels.length <= idx) {
return {ok: false, reason: 'seulement ' + sels.length + ' selects, besoin de ' + (idx+1)};
}
const prev = sels[idx].value;
sels[idx].value = val;
if (sels[idx].value !== val) {
const opts = Array.from(sels[idx].options).map(o => '"' + o.value + '"="' + o.text.trim() + '"').join(', ');
return {ok: false, reason: 'valeur "' + val + '" absente — options: {' + opts + '}'};
}
sels[idx].dispatchEvent(new Event('change', {bubbles: true}));
return {ok: true, prev: prev};
}
return {ok: false, reason: 'ligne introuvable pour ' + nom + ' ' + prenom};
}"""
_JS_SET_DATE = """([dateStr]) => {
// Cherche le DevExpress DateEdit : id se termine par _I
const candidates = [
document.querySelector("input[id*='kalender_I']"),
document.querySelector("input[id*='DateEdit_I']"),
document.querySelector("input[id*='_Date_I']"),
(() => {
const all = document.querySelectorAll("table input[type='text']");
return all.length ? all[0] : null;
})(),
];
const inp = candidates.find(Boolean);
if (!inp) return {ok: false, reason: 'input introuvable'};
inp.value = dateStr;
// Déclenche l'événement ASPx (postback DevExpress)
const ctrlName = inp.id.endsWith('_I') ? inp.id.slice(0, -2) : inp.id;
try {
if (typeof ASPx !== 'undefined' && ASPx.ETextChanged) {
ASPx.ETextChanged(ctrlName);
return {ok: true, method: 'ASPx', id: inp.id};
}
} catch(e) {}
// Fallback : événements DOM standards
inp.dispatchEvent(new Event('change', {bubbles: true}));
inp.dispatchEvent(new Event('input', {bubbles: true}));
return {ok: true, method: 'DOM', id: inp.id};
}"""
def _set_date(page, target_date: date) -> bool:
    """Set the date in the DevExpress date selector and wait for the reload.

    Strategy: try the JS-level set first (_JS_SET_DATE); verify by reading
    the input's value back.  If the value does not match, fall back to a
    direct Playwright triple-click + fill + Tab.  Returns True only when
    the input ends up showing the requested dd/mm/yyyy string.
    """
    date_str = target_date.strftime("%d/%m/%Y")
    try:
        result = page.evaluate(_JS_SET_DATE, [date_str])
        if not result.get("ok"):
            _log(f" [set_date] ERR : {result.get('reason', '?')}")
            return False
        _log(f" [set_date] date={date_str} via {result.get('method')} id={result.get('id')}")
        # Give the DevExpress postback time to start, then wait for quiet;
        # networkidle can legitimately time out, so it is best-effort.
        page.wait_for_timeout(400)
        try:
            page.wait_for_load_state("networkidle", timeout=20_000)
        except Exception:
            pass
        page.wait_for_timeout(600)
        # Verify by comparing the input's value attribute (not innerText).
        cur_val = page.evaluate(
            "() => { const i = document.getElementById('ContentPlaceHolder_site_kalender_I'); return i ? i.value : ''; }"
        )
        if cur_val == date_str:
            _log(f" [set_date] OK (input value = {cur_val})")
            return True
        # Fallback: direct Playwright interaction (fill + Tab).
        _log(f" [set_date] input value='{cur_val}''{date_str}', tentative fill+Tab…")
        try:
            inp2 = page.locator("input[id*='kalender_I']").first
            if not inp2.count():
                inp2 = page.locator("table input[type='text']").first
            if inp2.count():
                inp2.click(click_count=3)  # triple-click selects the old text
                inp2.fill(date_str)
                page.keyboard.press("Tab")
                page.wait_for_timeout(400)
                try:
                    page.wait_for_load_state("networkidle", timeout=20_000)
                except Exception:
                    pass
                page.wait_for_timeout(600)
                cur_val2 = page.evaluate(
                    "() => { const i = document.getElementById('ContentPlaceHolder_site_kalender_I'); return i ? i.value : ''; }"
                )
                if cur_val2 == date_str:
                    _log(f" [set_date] OK via fill+Tab (input value = {cur_val2})")
                    return True
                _log(f" [set_date] ERR : input value='{cur_val2}' après fill+Tab")
        except Exception as e2:
            _log(f" [set_date] fill+Tab ERR : {e2}")
        return False
    except Exception as e:
        _log(f" [set_date] ERR : {e}")
        return False
# Escada <option> values keyed by the app's action code.
# Presumably "E" = excused (option "68") and "N" = not excused ("56"),
# anything else clears the cell ("0") — confirm against the live page.
_ESCADA_CODES = {"E": "68", "N": "56", "clear": "0"}


def _set_dropdown(page, nom: str, prenom: str, periode: int, action: str) -> bool:
    """Set the dropdown of one period for an apprentice on the absence page.

    ``action`` is mapped to an Escada option value through ``_ESCADA_CODES``
    (unknown actions fall back to "0" = clear).  Returns True when the
    <select> accepted the value, False otherwise (row not found, value not
    among the options, JS error).

    NOTE(review): the original docstring described the row's <select>
    columns as Remarques(0) | Journée entière(1) | Excuser(2) | P1(3)…P10(12),
    which implies ``target_idx = periode + 2`` — yet the code below uses
    ``periode + 1``.  One of the two is stale; verify against the live
    Escada page layout before relying on either.
    """
    val = _ESCADA_CODES.get(action, "0")
    target_idx = periode + 1
    try:
        result = page.evaluate(_JS_SET_DROPDOWN, [nom, prenom, target_idx, val])
        if result.get("ok"):
            prev = result.get("prev", "?")
            _log(f" SET {nom} {prenom} P{periode} : '{prev}''{val}'")
            return True
        else:
            _log(f" WARN {nom} {prenom} P{periode} : {result.get('reason', '?')}")
            return False
    except Exception as e:
        _log(f" ERR {nom} {prenom} P{periode} : {e}")
        return False
def _save(page) -> bool:
    """Click the "Enregistrer" (save) button through JS — this bypasses
    DevExpress visibility quirks — then wait for the postback to settle.

    Returns True when a button was found and clicked, False otherwise.
    """
    js_click = """() => {
        // Cherche tous les inputs Enregistrer (visibles ou non)
        const btns = Array.from(document.querySelectorAll(
            'input[value="Enregistrer"], input[value*="nregistrer"]'
        ));
        if (!btns.length) return {ok: false, reason: 'bouton introuvable'};
        // Préférer le visible, sinon prendre le premier
        const btn = btns.find(b => b.offsetParent !== null) || btns[0];
        btn.click();
        return {ok: true, id: btn.id};
    }"""
    try:
        outcome = page.evaluate(js_click)
        if not outcome.get("ok"):
            _log(f" [save] ERR : {outcome.get('reason')}")
            return False
        _log(f" [save] cliqué id={outcome.get('id')}")
        # Short fixed pause, then a best-effort wait for network idle.
        page.wait_for_timeout(400)
        try:
            page.wait_for_load_state("networkidle", timeout=15_000)
        except Exception:
            pass
        page.wait_for_timeout(500)
        return True
    except Exception as e:
        _log(f" [save] ERR : {e}")
        return False
# ── Server synchronisation ───────────────────────────────────────────────────
def _pull_from_server(session: Session) -> dict[tuple, int]:
    """SSH to the server, export its EscadaPending rows as JSON and merge
    (upsert) them into the local DB.

    Returns a mapping (nom, prenom, classe, date_iso, periode) -> server_id
    used to clear the server-side rows after a successful push.  Every
    failure path (SSH error, timeout, empty output, bad JSON) logs a warning
    and returns an empty mapping so the push can proceed with local data.
    """
    _log("PULL Récupération des modifications en attente depuis le serveur…")
    # argv list + shell=False: nothing is interpolated through a local shell
    # (was shell=True with a formatted string).  The remote command is still
    # a single string interpreted by the remote shell, as ssh requires.
    cmd = [
        "ssh", _SSH_HOST,
        f"cd {_SSH_REMOTE} && .venv/bin/python scripts/export_pending.py",
    ]
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
        if result.returncode != 0:
            _log(f" WARN SSH export_pending échoué : {result.stderr.strip()}")
            return {}
        raw = result.stdout.strip()
        if not raw:
            _log(" INFO Aucune modification en attente sur le serveur.")
            return {}
        entries = json.loads(raw)
    except Exception as e:
        _log(f" WARN Impossible de récupérer depuis le serveur : {e}")
        return {}
    if not entries:
        _log(" INFO Aucune modification en attente sur le serveur.")
        return {}
    _log(f" {len(entries)} entrée(s) récupérée(s) du serveur")
    server_id_map: dict[tuple, int] = {}
    for entry in entries:
        # Match the server entry to a local apprentice by full identity;
        # unmatched entries are logged and skipped (not merged, not cleared).
        ap = session.execute(
            select(Apprenti).where(
                Apprenti.nom == entry["nom"],
                Apprenti.prenom == entry["prenom"],
                Apprenti.classe == entry["classe"],
            )
        ).scalar_one_or_none()
        if ap is None:
            _log(
                f" WARN apprenti introuvable localement : "
                f"{entry['nom']} {entry['prenom']} / {entry['classe']}"
            )
            continue
        d = date.fromisoformat(entry["date"])
        upsert_escada_pending(session, ap.id, d, entry["periode"], entry["action"])
        key = (entry["nom"], entry["prenom"], entry["classe"],
               entry["date"], entry["periode"])
        server_id_map[key] = entry["id"]
    session.commit()
    _log(f" {len(server_id_map)} entrée(s) fusionnée(s) dans la DB locale")
    return server_id_map
def _clear_server_pending(server_ids: list[int]) -> None:
    """SSH to the server and delete EscadaPending rows by ID.

    Best-effort: failures are logged as warnings, never raised.  A no-op
    when ``server_ids`` is empty.
    """
    if not server_ids:
        return
    ids_str = " ".join(str(i) for i in server_ids)
    # argv list + shell=False: avoids local shell interpolation (was
    # shell=True).  The remote command remains one string for the remote
    # shell; ids are ints so the joined string is safe to embed.
    cmd = [
        "ssh", _SSH_HOST,
        f"cd {_SSH_REMOTE} && .venv/bin/python scripts/clear_pending.py {ids_str}",
    ]
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
        if result.returncode != 0:
            _log(f" WARN SSH clear_pending échoué : {result.stderr.strip()}")
        else:
            _log(f" OK serveur nettoyé ({result.stdout.strip()})")
    except Exception as e:
        _log(f" WARN Impossible de nettoyer le serveur : {e}")
# ── Main commands ─────────────────────────────────────────────────────────────
def cmd_count(session: Session) -> None:
    """Log the number of pending Escada changes, then one line per entry."""
    entries = session.execute(select(EscadaPending)).scalars().all()
    _log(f"PENDING_COUNT {len(entries)}")
    for entry in entries:
        apprenti = entry.apprenti
        _log(f" {apprenti.classe} | {apprenti.nom} {apprenti.prenom} | {entry.date} P{entry.periode}{entry.action}")
def cmd_push(session: Session, test_mode: bool = False, no_pull: bool = False, debug: bool = False) -> None:
    """Push every pending status change to Escada.

    1. Pull pending changes from the server (unless ``no_pull``).
    2. Read the local EscadaPending rows (restricted in ``test_mode``).
    3. Drive the Escada pages with Playwright and update the dropdowns,
       one (classe, date) page at a time, saving once per page.
    4. Clear the successfully synced entries on the server side.

    NOTE(review): the ``debug`` parameter is accepted but never used in this
    body — confirm whether the interactive pause advertised by the CLI flag
    was dropped or lives elsewhere.
    """
    # ── 1. Pull from the server ───────────────────────────────────────────────
    server_id_map: dict[tuple, int] = {}
    if not no_pull:
        server_id_map = _pull_from_server(session)
    else:
        _log("INFO --no-pull : synchronisation serveur ignorée")
    # ── 2. Read the local EscadaPending rows ──────────────────────────────────
    q = select(EscadaPending).join(Apprenti, EscadaPending.apprenti_id == Apprenti.id)
    if test_mode:
        _log("INFO Mode test : Poidevin Alexandre / EM-AU 1 uniquement")
        q = q.where(Apprenti.nom == "Poidevin", Apprenti.prenom == "Alexandre")
    pending = session.execute(q).scalars().all()
    if not pending:
        _log("INFO Aucun changement en attente.")
        return
    # Group by (classe, date): one navigation + one save per group.
    groups: dict[tuple, list] = {}
    for ep in pending:
        key = (ep.apprenti.classe, ep.date)
        groups.setdefault(key, []).append(ep)
    _log(f"TOTAL {len(groups)} groupe(s) à synchroniser ({len(pending)} changement(s))")
    pw, ctx, page = _launch_context()
    try:
        _cache_load()
        page.goto(CLASSES_URL)
        _ensure_logged_in(page)
        results = {"ok": [], "err": []}
        # Local EscadaPending rows synced with success — used afterwards to
        # look up the matching server-side ids for cleanup.
        synced_eps: list[EscadaPending] = []
        for i, ((classe, target_date), entries) in enumerate(sorted(groups.items()), 1):
            _log(f"PROGRESS {i}/{len(groups)} {classe} {target_date}")
            abs_page = _go_to_absence_page(page, classe)
            if abs_page is None:
                _log(f"ERR {classe} : page absences introuvable")
                # One error entry per pending change in the failed group.
                for ep in entries:
                    results["err"].append(f"{classe} {target_date} : navigation échouée")
                continue
            if not _set_date(abs_page, target_date):
                _log(f"ERR {classe} {target_date} : changement de date échoué")
                for ep in entries:
                    results["err"].append(f"{classe} {target_date} : date non changée")
                continue
            # ── 3. Update the dropdowns on this (classe, date) page ───────────
            synced_ids = []
            synced_ep_objs = []
            for ep in entries:
                ap = ep.apprenti
                ok = _set_dropdown(abs_page, ap.nom, ap.prenom, ep.periode, ep.action)
                if ok:
                    synced_ids.append(ep.id)
                    synced_ep_objs.append(ep)
                else:
                    results["err"].append(
                        f"{classe} {target_date} {ap.nom} {ap.prenom} P{ep.periode}"
                    )
            if not synced_ids:
                _log(f" SKIP {classe} {target_date} : aucun dropdown modifié")
                continue
            if _save(abs_page):
                # Save succeeded: drop the local pending rows and flag the
                # matching Absence rows as published on Escada.
                for ep in synced_ep_objs:
                    obj = session.get(EscadaPending, ep.id)
                    if obj:
                        session.delete(obj)
                    # Mark the local absence as published on Escada
                    ab = session.execute(
                        select(Absence).where(
                            Absence.apprenti_id == ep.apprenti_id,
                            Absence.date == ep.date,
                            Absence.periode == ep.periode,
                        )
                    ).scalar_one_or_none()
                    if ab:
                        ab.statut = "publiee_escada"
                session.commit()
                _log(f"OK {classe} {target_date} : {len(synced_ids)} changement(s) sauvegardé(s)")
                results["ok"].extend(synced_ids)
                synced_eps.extend(synced_ep_objs)
            else:
                _log(f"ERR {classe} {target_date} : sauvegarde échouée")
                results["err"].append(f"{classe} {target_date} : enregistrement échoué")
        _log(f"PUSH_DONE {json.dumps({'ok': len(results['ok']), 'err': results['err']}, ensure_ascii=False)}")
        # ── 4. Server-side cleanup for entries synced with success ────────────
        if server_id_map and synced_eps:
            server_ids_to_clear: list[int] = []
            for ep in synced_eps:
                ap = ep.apprenti
                key = (ap.nom, ap.prenom, ap.classe, ep.date.isoformat(), ep.periode)
                srv_id = server_id_map.get(key)
                if srv_id is not None:
                    server_ids_to_clear.append(srv_id)
            if server_ids_to_clear:
                _clear_server_pending(server_ids_to_clear)
    finally:
        # Always release the Playwright context, even after an exception.
        ctx.close()
        pw.stop()
# ── Entry point ───────────────────────────────────────────────────────────────
if __name__ == "__main__":
    import argparse
    # RawDescriptionHelpFormatter keeps the usage examples from the module
    # docstring formatted as written.
    ap = argparse.ArgumentParser(description=__doc__,
                                 formatter_class=argparse.RawDescriptionHelpFormatter)
    ap.add_argument("--test", action="store_true", help="Limite au test Poidevin Alexandre")
    ap.add_argument("--count", action="store_true", help="Affiche les changements en attente")
    ap.add_argument("--no-pull", action="store_true", help="Ne pas récupérer les données du serveur avant push")
    ap.add_argument("--pull-only", action="store_true", help="Récupère depuis le serveur sans pousser vers Escada")
    ap.add_argument("--debug", action="store_true", help="Pause interactive après ouverture de la page absences")
    args = ap.parse_args()
    engine = init_db()
    Session_ = sessionmaker(bind=engine)
    with Session_() as sess:
        if args.count:
            cmd_count(sess)
        elif args.pull_only:
            _pull_from_server(sess)
        else:
            cmd_push(sess, test_mode=args.test, no_pull=args.no_pull, debug=args.debug)

152
scripts/run_imports.py Executable file
View file

@ -0,0 +1,152 @@
#!/usr/bin/env python3
"""Read sync_all_done.json, run the PDF -> DB imports, write sync_last_result.json.

Launched as an independent subprocess by reset_sync (start_new_session=True).
Args: <data_dir> <username> <force_abs:0|1>
"""
import json, sys
from pathlib import Path
from datetime import datetime

# Make the project root importable when run directly as a script.
_ROOT = Path(__file__).resolve().parent.parent
sys.path.insert(0, str(_ROOT))

# Positional arguments, each with a default so the script can run bare.
DATA_DIR = Path(sys.argv[1]) if len(sys.argv) > 1 else _ROOT / "data"
USERNAME = sys.argv[2] if len(sys.argv) > 2 else "escada"
FORCE_ABS = len(sys.argv) > 3 and sys.argv[3] == "1"
ALL_DONE = DATA_DIR / "sync_all_done.json"    # input: payload produced by the sync step
RESULT = DATA_DIR / "sync_last_result.json"   # output: summary written at the end

from src.logger import app_log
app_log("[run_imports] démarré")

# Read the payload produced by the sync step; abort early when unreadable.
try:
    raw = json.loads(ALL_DONE.read_text(encoding="utf-8"))
    payload = raw.get("payload", {})
except Exception as e:
    app_log(f"[run_imports] ERREUR lecture all_done: {e}")
    sys.exit(1)

# Project imports deliberately happen after sys.path is set up.
# NOTE(review): EscadaPending is imported but unused below — confirm intent.
from src.db import get_session
from src.importer import import_pdf as do_import
from src.importer_bn import import_bn as do_import_bn
from src.importer_matu import import_matu as do_import_matu
from src.importer_notes import import_notes_pdf
from src.db import Apprenti, EscadaPending, upsert_apprenti_fiche, _norm_prenom
from sqlalchemy import select

abs_pdfs = payload.get("abs", [])
bn_pdfs = payload.get("bn", [])
matu_pdfs = payload.get("matu", [])
notes_pdfs = payload.get("notes", [])
fiches = payload.get("fiches", {})
errors = list(payload.get("errors", []))

# ── Absence PDFs — one DB session per file so a failure stays isolated ──
res_abs = []
for pdf_path in abs_pdfs:
    sess = get_session()
    try:
        r = do_import(Path(pdf_path), sess, imported_by=USERNAME, force=FORCE_ABS)
        detail = f"{r.nb_absences_nouvelles} nouvelles"
        if r.nb_absences_mises_a_jour:
            detail += f", {r.nb_absences_mises_a_jour} maj"
        res_abs.append({"classe": r.classe, "detail": detail})
        app_log(f"[run_imports] abs {r.classe}: {detail}")
    except Exception as e:
        errors.append(f"Import abs {Path(pdf_path).name}: {e}")
        app_log(f"[run_imports] erreur abs: {e}")
    finally:
        sess.close()

# ── "Bulletin de notes" (BN) PDFs ──
res_bn = []
for pdf_path in bn_pdfs:
    sess = get_session()
    try:
        r = do_import_bn(Path(pdf_path), sess, imported_by=USERNAME)
        res_bn.append({"classe": r.classe, "nb": str(r.nb_apprentis)})
        app_log(f"[run_imports] BN {r.classe}: {r.nb_apprentis}")
    except Exception as e:
        errors.append(f"Import BN {Path(pdf_path).name}: {e}")
        app_log(f"[run_imports] erreur BN: {e}")
    finally:
        sess.close()

# ── Exam-notes PDFs ──
res_notes = []
for pdf_path in notes_pdfs:
    sess = get_session()
    try:
        r = import_notes_pdf(Path(pdf_path), sess)
        res_notes.append({"classe": r["classe"], "nb": str(r["nb"])})
        app_log(f"[run_imports] notes {r['classe']}: {r['nb']} apprenti(s)")
    except Exception as e:
        errors.append(f"Import notes {Path(pdf_path).name}: {e}")
        app_log(f"[run_imports] erreur notes: {e}")
    finally:
        sess.close()

# ── Matu PDFs — the importer also returns names it could not match ──
res_matu = []
for pdf_path in matu_pdfs:
    sess = get_session()
    try:
        r, unmatched = do_import_matu(Path(pdf_path), sess, imported_by=USERNAME)
        res_matu.append({
            "classe": r.classe_mp or Path(pdf_path).stem,
            "nb": str(r.nb_apprentis),
            "unmatched": ", ".join(unmatched) if unmatched else "",
        })
        app_log(f"[run_imports] matu {r.classe_mp}: {r.nb_apprentis}")
    except Exception as e:
        errors.append(f"Import matu {Path(pdf_path).name}: {e}")
        app_log(f"[run_imports] erreur matu: {e}")
    finally:
        sess.close()

# ── Per-apprentice info sheets, matched by normalized full name ──
if fiches:
    sess = get_session()
    try:
        for cls, fiches_list in fiches.items():
            cls_apprentis = sess.execute(
                select(Apprenti).where(Apprenti.classe == cls)
            ).scalars().all()
            for fiche in fiches_list:
                nom_eleve = fiche.get("nom_eleve", "")
                if not nom_eleve:
                    continue
                nom_norm = _norm_prenom(nom_eleve)
                ap = None
                # Exact match on the normalized "nom prenom", or prefix
                # match to tolerate extra trailing tokens in the sheet name.
                for a in cls_apprentis:
                    full_norm = _norm_prenom(f"{a.nom} {a.prenom}")
                    if full_norm == nom_norm or full_norm.startswith(nom_norm + " "):
                        ap = a
                        break
                if ap:
                    upsert_apprenti_fiche(sess, ap.id, fiche)
        sess.commit()
        app_log(f"[run_imports] fiches: {sum(len(v) for v in fiches.values())} entrées")
    except Exception as e:
        errors.append(f"Import fiches: {e}")
        sess.rollback()
        app_log(f"[run_imports] erreur fiches: {e}")
    finally:
        sess.close()

# ── Final summary file; a write failure is fatal (exit code 1) ──
result = {
    "timestamp": datetime.now().isoformat(),
    "res_abs": res_abs,
    "res_bn": res_bn,
    "res_notes": res_notes,
    "res_matu": res_matu,
    "errors": errors,
    "op_log": "",
}
try:
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    RESULT.write_text(json.dumps(result, ensure_ascii=False), encoding="utf-8")
    app_log(f"[run_imports] résultats sauvegardés — notes={len(res_notes)} abs={len(res_abs)} erreurs={len(errors)}")
except Exception as e:
    app_log(f"[run_imports] ERREUR sauvegarde: {e}")
    sys.exit(1)
app_log("[run_imports] terminé OK")

1782
scripts/sync_esacada.py Normal file

File diff suppressed because it is too large Load diff

125
src/importer_notes.py Normal file
View file

@ -0,0 +1,125 @@
import json
import re
from pathlib import Path
from sqlalchemy import select, delete
from sqlalchemy.orm import Session
from .db import Apprenti, NotesExamen
def parse_notes_pdf(pdf_path: Path, nom: str, prenom: str) -> list[dict] | None:
"""Parse le PDF de notes d'examen pour un apprenti donné."""
try:
import pdfplumber as _pp
except ImportError:
return None
_RE_BR = re.compile(
r"^(.+?)\s+[A-Z][A-Z0-9\-]+(?: \d+)?\s+\((\d+\.\d+)\)\s+(\d+\.\d+)$"
)
_RE_DT = re.compile(r"^(\d{2}\.\d{2}\.\d{4})\s+(.+)$")
def _exam(line: str) -> dict | None:
m = _RE_DT.match(line)
if not m:
return None
date, rest = m.group(1), m.group(2).strip()
tok = rest.split()
note = None
disp = False
i = len(tok) - 1
if i >= 0 and re.match(r"^\d+\.\d+$", tok[i]):
note = float(tok[i]); i -= 1
elif i >= 0 and tok[i].lower() in ("disp.", "disp"):
note = "disp."; i -= 1
if i >= 0 and tok[i].lower() == "x":
disp = True; i -= 1
typ = tok[i] if i >= 0 else ""; i -= 1
coeff = None
if i >= 0 and re.match(r"^\d+\.\d+$", tok[i]):
coeff = float(tok[i]); i -= 1
ens = ""
if i >= 0 and re.match(r"^[A-Z]{3,8}$", tok[i]):
ens = tok[i]; i -= 1
desc = " ".join(tok[: i + 1])
if coeff is None and not ens:
return None
return {
"date": date, "description": desc, "enseignant": ens,
"coefficient": coeff, "type": typ, "dispensed": disp, "note": note,
}
_SKIP = {
"Departement", "Service", "Ecole professionnelle", "Chemin",
"Case postale", "Rue", "Monthey", "Sion", "Seite", "Liste interm",
"Classe:", "Absences", "Matiere", "Date Examen",
"Branches de culture", "Branches professionnelles",
}
try:
nom_l = nom.lower()
prenom_l = prenom.lower()
with _pp.open(str(pdf_path)) as pdf:
pages = [
p.extract_text(x_tolerance=2) or ""
for p in pdf.pages
if nom_l in (p.extract_text(x_tolerance=2) or "").lower()
and prenom_l in (p.extract_text(x_tolerance=2) or "").lower()
]
if not pages:
return None
lines = [ln.strip() for ln in "\n".join(pages).splitlines() if ln.strip()]
branches: list[dict] = []
cur: dict | None = None
for line in lines:
if any(kw in line for kw in _SKIP):
continue
if re.match(r"^\d{4}$", line):
continue
m = _RE_BR.match(line)
if m:
cur = {
"branche": m.group(1).strip(),
"moy_prov": float(m.group(2)),
"moy_arr": float(m.group(3)),
"examens": [],
}
branches.append(cur)
continue
if cur and re.match(r"^\d{2}\.\d{2}\.\d{4}", line):
e = _exam(line)
if e:
cur["examens"].append(e)
return branches or None
except Exception:
return None
def import_notes_pdf(pdf_path: Path, sess: Session, classe: str | None = None) -> dict:
    """Import exam notes from a PDF for every apprentice of one class.

    When ``classe`` is None it is derived from the file name
    ("notes_EM_AU_1.pdf" -> "EM AU 1").  Existing NotesExamen rows for the
    class are wiped first, then one JSON row is inserted per apprentice
    whose pages could be parsed.  Returns {"classe": str, "nb": int}.
    """
    path = Path(pdf_path)
    if classe is None:
        classe = path.stem.replace("notes_", "").replace("_", " ")
    apprentis = sess.execute(
        select(Apprenti).where(Apprenti.classe == classe)
    ).scalars().all()
    # Wipe any previous notes for the whole class before re-importing.
    apprenti_ids = [a.id for a in apprentis]
    if apprenti_ids:
        sess.execute(delete(NotesExamen).where(NotesExamen.apprenti_id.in_(apprenti_ids)))
    imported = 0
    for apprenti in apprentis:
        branches = parse_notes_pdf(path, apprenti.nom, apprenti.prenom)
        if branches is None:
            continue
        sess.add(NotesExamen(
            apprenti_id=apprenti.id,
            donnees_json=json.dumps(branches, ensure_ascii=False),
        ))
        imported += 1
    sess.commit()
    return {"classe": classe, "nb": imported}

27
src/logger.py Normal file
View file

@ -0,0 +1,27 @@
import os
from datetime import datetime
from pathlib import Path
from zoneinfo import ZoneInfo

# Timestamps are rendered in the school's local timezone.
_TZ = ZoneInfo("Europe/Zurich")
_ROOT = Path(__file__).resolve().parent.parent
# DATA_DIR can be overridden through the environment (container deployments).
DATA_DIR = Path(os.getenv("DATA_DIR", str(_ROOT / "data")))
_LOG_FILE = DATA_DIR / "logs" / "operations.log"


def app_log(msg: str, debug: bool = False) -> None:
    """Append one timestamped line to operations.log.

    debug=False: "[HH:MM:SS] msg"  — non-indented line (PROD-level entry)
    debug=True:  "[HH:MM:SS]  msg" — indented line (DEBUG-level entry)

    Best-effort: any filesystem error is swallowed so logging can never
    break the caller.
    """
    stamp = datetime.now(tz=_TZ).strftime("%H:%M:%S")
    marker = " " if debug else ""
    entry = f"[{stamp}] {marker}{msg}"
    try:
        _LOG_FILE.parent.mkdir(parents=True, exist_ok=True)
        with _LOG_FILE.open("a", encoding="utf-8") as fh:
            fh.write(entry + "\n")
    except Exception:
        pass