"""Modèles SQLAlchemy et initialisation de la base de données."""
|
|
|
|
import sys
|
|
import unicodedata
|
|
from datetime import date, datetime
|
|
from pathlib import Path
|
|
from typing import Optional
|
|
|
|
from sqlalchemy import ForeignKey, String, Text, UniqueConstraint, create_engine, select, text
|
|
from sqlalchemy.orm import (
|
|
DeclarativeBase,
|
|
Mapped,
|
|
Session,
|
|
mapped_column,
|
|
relationship,
|
|
sessionmaker,
|
|
)
|
|
|
|
DB_PATH = Path(__file__).parent.parent / "data" / "absences.db"
|
|
|
|
STATUTS = {"a_traiter", "excusee", "non_excusee", "en_attente_justificatif", "publiee_escada"}
|
|
|
|
|
|
class Base(DeclarativeBase):
    """Common declarative base shared by every ORM model in this module."""

    pass
|
|
|
|
|
|
class Apprenti(Base):
    """An apprentice (student), uniquely identified by last name + first name + class."""

    __tablename__ = "apprentis"
    # One row per (last name, first name, class) triple.
    __table_args__ = (UniqueConstraint("nom", "prenom", "classe"),)

    id: Mapped[int] = mapped_column(primary_key=True)
    nom: Mapped[str]
    prenom: Mapped[str]
    classe: Mapped[str]
    created_at: Mapped[datetime] = mapped_column(default=datetime.now)

    # Related records: absences, report-card grades, Matu grades,
    # personal data sheet (0..1) and exam grades (0..1).
    absences: Mapped[list["Absence"]] = relationship(back_populates="apprenti")
    notes_bulletin: Mapped[list["NotesBulletin"]] = relationship(back_populates="apprenti")
    notes_matu: Mapped[list["NotesMatu"]] = relationship(back_populates="apprenti")
    fiche: Mapped[Optional["ApprentiFiche"]] = relationship(back_populates="apprenti", uselist=False)
    notes_examen: Mapped[Optional["NotesExamen"]] = relationship(back_populates="apprenti", uselist=False)
|
|
|
|
|
|
class Import(Base):
    """One absence-report import run (audit trail of what was loaded)."""

    __tablename__ = "imports"

    id: Mapped[int] = mapped_column(primary_key=True)
    date_import: Mapped[datetime] = mapped_column(default=datetime.now)
    # Source file name plus the class and semester the report covered.
    fichier: Mapped[str]
    classe: Mapped[str]
    semestre: Mapped[str]
    # Run statistics: apprentices seen, new absences created, duplicates skipped.
    nb_apprentis: Mapped[int] = mapped_column(default=0)
    nb_absences_nouvelles: Mapped[int] = mapped_column(default=0)
    nb_absences_doublons: Mapped[int] = mapped_column(default=0)
    imported_by: Mapped[str]

    absences: Mapped[list["Absence"]] = relationship(back_populates="import_ref")
|
|
|
|
|
|
class Absence(Base):
    """A single absence slot (one period of one day) for one apprentice."""

    __tablename__ = "absences"
    # At most one absence record per (apprentice, day, period).
    __table_args__ = (UniqueConstraint("apprenti_id", "date", "periode"),)

    id: Mapped[int] = mapped_column(primary_key=True)
    apprenti_id: Mapped[int] = mapped_column(ForeignKey("apprentis.id"))
    date: Mapped[date]
    periode: Mapped[int]
    # type_origine = value from the PDF, immutable after import
    type_origine: Mapped[str]
    # Workflow status; allowed values are listed in module-level STATUTS.
    statut: Mapped[str] = mapped_column(default="a_traiter")
    justificatif_recu: Mapped[bool] = mapped_column(default=False)
    justificatif_date: Mapped[Optional[date]] = mapped_column(nullable=True)
    notes: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    # Import run that produced this row (None for rows not tied to an import).
    import_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey("imports.id"), nullable=True
    )
    updated_at: Mapped[datetime] = mapped_column(
        default=datetime.now, onupdate=datetime.now
    )
    updated_by: Mapped[Optional[str]] = mapped_column(String, nullable=True)

    apprenti: Mapped["Apprenti"] = relationship(back_populates="absences")
    import_ref: Mapped[Optional["Import"]] = relationship(back_populates="absences")
|
|
|
|
|
|
class EscadaPending(Base):
    """Queue of changes waiting to be pushed to Escada."""

    __tablename__ = "escada_pending"
    # At most one pending action per (apprentice, day, period) slot;
    # upsert_escada_pending() overwrites the action on conflict.
    __table_args__ = (UniqueConstraint("apprenti_id", "date", "periode"),)

    id: Mapped[int] = mapped_column(primary_key=True)
    apprenti_id: Mapped[int] = mapped_column(ForeignKey("apprentis.id"))
    date: Mapped[date]
    periode: Mapped[int]
    action: Mapped[str]  # "E" | "N" | "clear"
    created_at: Mapped[datetime] = mapped_column(default=datetime.now)

    apprenti: Mapped["Apprenti"] = relationship()
|
|
|
|
|
|
class ImportBN(Base):
    """One report-card ("bulletin de notes") import run."""

    __tablename__ = "imports_bn"

    id: Mapped[int] = mapped_column(primary_key=True)
    date_import: Mapped[datetime] = mapped_column(default=datetime.now)
    fichier: Mapped[str]
    classe: Mapped[str]
    type_classe: Mapped[str]  # "EM" or "DUAL"
    nb_apprentis: Mapped[int] = mapped_column(default=0)
    imported_by: Mapped[str]

    notes: Mapped[list["NotesBulletin"]] = relationship(back_populates="import_ref")
|
|
|
|
|
|
class NotesBulletin(Base):
    """Report-card grades for one apprentice within one import run."""

    __tablename__ = "notes_bulletin"
    # One grade record per apprentice and per import run.
    __table_args__ = (UniqueConstraint("apprenti_id", "import_id"),)

    id: Mapped[int] = mapped_column(primary_key=True)
    apprenti_id: Mapped[int] = mapped_column(ForeignKey("apprentis.id"))
    import_id: Mapped[int] = mapped_column(ForeignKey("imports_bn.id"))
    type_classe: Mapped[str]  # "EM" or "DUAL"
    sem_labels_json: Mapped[str] = mapped_column(Text)  # JSON list[str|None] len=8
    donnees_json: Mapped[str] = mapped_column(Text)  # JSON {groupes:{…}, globale:{…}}
    imported_at: Mapped[datetime] = mapped_column(default=datetime.now)

    apprenti: Mapped["Apprenti"] = relationship(back_populates="notes_bulletin")
    import_ref: Mapped["ImportBN"] = relationship(back_populates="notes")
|
|
|
|
|
|
class ImportMatu(Base):
    """One Matu grade-sheet import run."""

    __tablename__ = "imports_matu"

    id: Mapped[int] = mapped_column(primary_key=True)
    date_import: Mapped[datetime] = mapped_column(default=datetime.now)
    fichier: Mapped[str]
    classe_mp: Mapped[str]
    sem_label: Mapped[str]  # e.g. "25-26 2"
    nb_apprentis: Mapped[int] = mapped_column(default=0)
    imported_by: Mapped[str]

    notes: Mapped[list["NotesMatu"]] = relationship(back_populates="import_ref")
|
|
|
|
|
|
class NotesMatu(Base):
    """Matu grades for one apprentice within one import run."""

    __tablename__ = "notes_matu"
    # One grade record per apprentice and per import run.
    __table_args__ = (UniqueConstraint("apprenti_id", "import_id"),)

    id: Mapped[int] = mapped_column(primary_key=True)
    apprenti_id: Mapped[int] = mapped_column(ForeignKey("apprentis.id"))
    import_id: Mapped[int] = mapped_column(ForeignKey("imports_matu.id"))
    classe_mp: Mapped[str]
    sem_label: Mapped[str]  # e.g. "25-26 2"
    moy: Mapped[Optional[float]] = mapped_column(nullable=True)
    promotion: Mapped[Optional[str]] = mapped_column(String, nullable=True)  # "B" / "P" / "NB"
    prom_info: Mapped[Optional[str]] = mapped_column(String, nullable=True)  # for NB: "25-26 1"
    imported_at: Mapped[datetime] = mapped_column(default=datetime.now)

    apprenti: Mapped["Apprenti"] = relationship(back_populates="notes_matu")
    import_ref: Mapped["ImportMatu"] = relationship(back_populates="notes")
|
|
|
|
|
|
class ApprentiFiche(Base):
    """Personal data scraped from Escada (ViewLernende), one row per apprentice."""

    __tablename__ = "apprenti_fiches"

    id: Mapped[int] = mapped_column(primary_key=True)
    apprenti_id: Mapped[int] = mapped_column(ForeignKey("apprentis.id"), unique=True)

    # Student
    adresse: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    code_postal: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    localite: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    telephone: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    email: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    date_naissance: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    majeur: Mapped[Optional[bool]] = mapped_column(nullable=True)
    # Disadvantage compensation (Nachteilsausgleich) — True if granted,
    # False otherwise, None if the data was not scraped
    compensation_desavantages: Mapped[Optional[bool]] = mapped_column(nullable=True)

    # Company
    entreprise_nom: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    entreprise_adresse: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    entreprise_code_postal: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    entreprise_localite: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    entreprise_telephone: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    entreprise_email: Mapped[Optional[str]] = mapped_column(String, nullable=True)

    # Trainer
    formateur_nom: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    formateur_email: Mapped[Optional[str]] = mapped_column(String, nullable=True)

    # Legal representative (minors only; scraped from the "Liste des classes"
    # PDF on Escada).
    resp_legal_nom: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    resp_legal_adresse: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    resp_legal_code_postal: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    resp_legal_localite: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    resp_legal_telephone_p: Mapped[Optional[str]] = mapped_column(String, nullable=True)  # landline
    resp_legal_telephone_n: Mapped[Optional[str]] = mapped_column(String, nullable=True)  # mobile

    # Profession derived from the class-name prefix (mapping in data/settings.json)
    profession: Mapped[Optional[str]] = mapped_column(String, nullable=True)

    updated_at: Mapped[datetime] = mapped_column(default=datetime.now, onupdate=datetime.now)

    apprenti: Mapped["Apprenti"] = relationship(back_populates="fiche")
|
|
|
|
|
|
class NotesExamen(Base):
    """Exam grades parsed from the Escada PDF, one record per apprentice."""

    __tablename__ = "notes_examen"

    id: Mapped[int] = mapped_column(primary_key=True)
    apprenti_id: Mapped[int] = mapped_column(ForeignKey("apprentis.id"), unique=True)
    # Parsed grade payload, stored as a JSON string.
    donnees_json: Mapped[str] = mapped_column(Text)
    updated_at: Mapped[datetime] = mapped_column(default=datetime.now, onupdate=datetime.now)

    apprenti: Mapped["Apprenti"] = relationship(back_populates="notes_examen")
|
|
|
|
|
|
class Notice(Base):
    """Note to push to Escada (linked to an apprentice).

    Created in particular when a detention notice is generated (if the
    corresponding checkbox is ticked). Deleted after a successful push.
    """
    __tablename__ = "notices"

    id: Mapped[int] = mapped_column(primary_key=True)
    apprenti_id: Mapped[int] = mapped_column(ForeignKey("apprentis.id"))
    date_event: Mapped[date]
    titre: Mapped[str] = mapped_column(Text)
    remarque: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    type_notice: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    matiere: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    source: Mapped[str] = mapped_column(default="manual")  # "retenue" for the moment
    status: Mapped[str] = mapped_column(default="pending")  # "pending" | "failed"
    created_at: Mapped[datetime] = mapped_column(default=datetime.now)
    created_by: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    # Last push error, kept for display when status == "failed".
    error_msg: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    apprenti: Mapped["Apprenti"] = relationship()
|
|
|
|
|
|
class ApprentiNotice(Base):
    """Notices scraped from Escada (read-only on the app side, never pushed back).

    Strategy: on every pull, all ApprentiNotice rows of the apprentice are
    deleted, then re-inserted from Escada (full replace).
    """
    __tablename__ = "apprenti_notices"

    id: Mapped[int] = mapped_column(primary_key=True)
    apprenti_id: Mapped[int] = mapped_column(ForeignKey("apprentis.id"))
    date_event: Mapped[date]
    type_notice: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    auteur: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    titre: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    remarque: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    matiere: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    visible_classe: Mapped[Optional[bool]] = mapped_column(nullable=True)
    imported_at: Mapped[datetime] = mapped_column(default=datetime.now)

    apprenti: Mapped["Apprenti"] = relationship()
|
|
|
|
|
|
class SanctionExport(Base):
    """Audit record of one sanction export for one apprentice."""

    __tablename__ = "sanctions_export"

    id: Mapped[int] = mapped_column(primary_key=True)
    apprenti_id: Mapped[int] = mapped_column(ForeignKey("apprentis.id"))
    date_export: Mapped[datetime] = mapped_column(default=datetime.now)
    exported_by: Mapped[str]
    # Nullable: rows created before this column was added (see the ALTER in
    # init_db) have no value.
    nb_absences: Mapped[Optional[int]] = mapped_column(nullable=True)

    apprenti: Mapped["Apprenti"] = relationship()
|
|
|
|
|
|
class CronJob(Base):
    """Scheduled task (cron) for automatic Escada pull/push."""

    __tablename__ = "cron_jobs"

    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]
    enabled: Mapped[bool] = mapped_column(default=True)

    # schedule_kind ∈ {"daily", "weekly", "interval"}
    # daily   : schedule_value="HH:MM"
    # weekly  : schedule_value="MON,TUE,WED,THU,FRI:HH:MM"
    # interval: schedule_value="60" (minutes)
    schedule_kind: Mapped[str] = mapped_column(default="daily")
    schedule_value: Mapped[str] = mapped_column(default="03:00")

    # task_kind ∈ {"push", "sync", "push_then_sync"}
    # The sync_* sub-options determine _what_ the push/sync acts on:
    #   push: push_to_escada.py if sync_abs, and/or push_notices.py if sync_notices
    #   sync: sync_esacada.py if any of {sync_abs, sync_bn, sync_notes, sync_fiches},
    #         and/or pull_notices.py if sync_notices
    task_kind: Mapped[str] = mapped_column(default="push_then_sync")

    # Sub-options: which kinds of data to process
    sync_abs: Mapped[bool] = mapped_column(default=True)
    sync_bn: Mapped[bool] = mapped_column(default=True)
    sync_notes: Mapped[bool] = mapped_column(default=True)
    sync_fiches: Mapped[bool] = mapped_column(default=False)
    sync_notices: Mapped[bool] = mapped_column(default=False)
    force_abs: Mapped[bool] = mapped_column(default=False)

    # JSON list of classes, or "ALL" for every class
    classes_json: Mapped[str] = mapped_column(default="ALL")

    # Notifications
    # notify_on ∈ {"never", "always", "success", "failure"}
    notify_on: Mapped[str] = mapped_column(default="failure")
    # notify_level ∈ {"normal", "detailed"}
    notify_level: Mapped[str] = mapped_column(default="normal")
    notify_chat_id: Mapped[str] = mapped_column(default="")  # overrides the global config

    # State of the last run
    last_run_at: Mapped[Optional[datetime]] = mapped_column(nullable=True)
    last_status: Mapped[str] = mapped_column(default="")  # "ok"|"fail"|"running"|""
    last_message: Mapped[str] = mapped_column(Text, default="")
    last_log_path: Mapped[str] = mapped_column(default="")
    last_pid: Mapped[Optional[int]] = mapped_column(nullable=True)

    created_at: Mapped[datetime] = mapped_column(default=datetime.now)
    # NOTE(review): unlike Absence.updated_at there is no onupdate here —
    # presumably the scheduler sets this field explicitly; confirm.
    updated_at: Mapped[datetime] = mapped_column(default=datetime.now)
|
|
|
|
|
|
class FeedbackMessage(Base):
    """User feedback message (bug / feature request) collected via the
    in-app chat widget. Managed from the /feedback admin page."""
    __tablename__ = "feedback_messages"

    id: Mapped[int] = mapped_column(primary_key=True)
    created_at: Mapped[datetime] = mapped_column(default=datetime.now)
    created_by: Mapped[str]  # username
    user_email: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    type: Mapped[str]  # "bug" | "feature"
    message: Mapped[str] = mapped_column(Text)
    context_url: Mapped[Optional[str]] = mapped_column(String, nullable=True)  # originating page
    status: Mapped[str] = mapped_column(default="new")  # "new" | "in_progress" | "resolved"
    admin_response: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    response_sent_at: Mapped[Optional[datetime]] = mapped_column(nullable=True)
|
|
|
|
|
|
def get_engine(db_url: str | None = None):
    """Create a SQLAlchemy engine for *db_url* (defaults to the local SQLite file).

    For SQLite URLs the engine is tuned for multi-threaded use:
    ``check_same_thread=False``, plus WAL journaling and a 10 s busy
    timeout applied to every new DBAPI connection.

    Fix: the SQLite-specific ``connect_args`` and PRAGMA listener were
    previously applied unconditionally, which would break any non-SQLite
    ``db_url``; they are now only installed for SQLite URLs.
    """
    url = db_url or f"sqlite:///{DB_PATH}"
    if not url.startswith("sqlite"):
        # Non-SQLite backend: no SQLite-only connect args or PRAGMAs.
        return create_engine(url)

    from sqlalchemy import event as _sa_event

    engine = create_engine(url, connect_args={"check_same_thread": False})

    @_sa_event.listens_for(engine, "connect")
    def _set_wal(dbapi_conn, _rec):
        # WAL lets readers run concurrently with a writer; busy_timeout
        # avoids immediate "database is locked" errors under contention.
        dbapi_conn.execute("PRAGMA journal_mode=WAL")
        dbapi_conn.execute("PRAGMA busy_timeout=10000")

    return engine
|
|
|
|
|
|
def init_db(engine=None):
    """Create all tables and apply lightweight schema migrations. Idempotent.

    After ``create_all``, a fixed list of ALTER/UPDATE/CREATE statements is
    replayed in order; each is committed individually and failures are
    deliberately swallowed (typically "duplicate column" on a database that
    is already up to date). Returns the engine used.
    """
    if engine is None:
        engine = get_engine()
    Base.metadata.create_all(engine)
    with engine.connect() as _conn:
        for stmt in (
            "ALTER TABLE sanctions_export ADD COLUMN nb_absences INTEGER",
            "ALTER TABLE cron_jobs ADD COLUMN notify_level TEXT DEFAULT 'normal'",
            "ALTER TABLE apprenti_fiches ADD COLUMN profession TEXT",
            "ALTER TABLE apprenti_fiches ADD COLUMN compensation_desavantages BOOLEAN",
            "ALTER TABLE apprenti_fiches ADD COLUMN resp_legal_nom TEXT",
            "ALTER TABLE apprenti_fiches ADD COLUMN resp_legal_adresse TEXT",
            "ALTER TABLE apprenti_fiches ADD COLUMN resp_legal_code_postal TEXT",
            "ALTER TABLE apprenti_fiches ADD COLUMN resp_legal_localite TEXT",
            "ALTER TABLE apprenti_fiches ADD COLUMN resp_legal_telephone_p TEXT",
            "ALTER TABLE apprenti_fiches ADD COLUMN resp_legal_telephone_n TEXT",
            "ALTER TABLE cron_jobs ADD COLUMN sync_notices BOOLEAN DEFAULT 0",
            # Cron task_kind migration — 3-value scheme + sync_notices checkbox.
            # Step A: rows that targeted notices get sync_notices=1 and the
            # other data flags cleared (before the info is lost in step B).
            """UPDATE cron_jobs SET
                sync_notices = 1,
                sync_abs = 0,
                sync_bn = 0,
                sync_notes = 0,
                sync_fiches = 0
            WHERE task_kind IN ('notices_push','notices_sync','notices_push_then_sync','push_notices')""",
            # Step B: normalize task_kind onto the 3 canonical values.
            "UPDATE cron_jobs SET task_kind='push' WHERE task_kind IN ('absences_push','notices_push')",
            "UPDATE cron_jobs SET task_kind='sync' WHERE task_kind IN ('absences_sync','notices_sync')",
            "UPDATE cron_jobs SET task_kind='push_then_sync' WHERE task_kind IN ('absences_push_then_sync','notices_push_then_sync','push_notices')",
            """CREATE TABLE IF NOT EXISTS apprenti_notices (
                id INTEGER PRIMARY KEY,
                apprenti_id INTEGER NOT NULL REFERENCES apprentis(id),
                date_event DATE NOT NULL,
                type_notice TEXT,
                auteur TEXT,
                titre TEXT,
                remarque TEXT,
                matiere TEXT,
                visible_classe BOOLEAN,
                imported_at DATETIME NOT NULL
            )""",
            """CREATE TABLE IF NOT EXISTS notices (
                id INTEGER PRIMARY KEY,
                apprenti_id INTEGER NOT NULL REFERENCES apprentis(id),
                date_event DATE NOT NULL,
                titre TEXT NOT NULL,
                remarque TEXT,
                type_notice TEXT,
                matiere TEXT,
                source TEXT NOT NULL DEFAULT 'manual',
                status TEXT NOT NULL DEFAULT 'pending',
                created_at DATETIME NOT NULL,
                created_by TEXT,
                error_msg TEXT
            )""",
            """CREATE TABLE IF NOT EXISTS escada_pending (
                id INTEGER PRIMARY KEY,
                apprenti_id INTEGER NOT NULL REFERENCES apprentis(id),
                date DATE NOT NULL,
                periode INTEGER NOT NULL,
                action TEXT NOT NULL,
                created_at DATETIME NOT NULL,
                UNIQUE (apprenti_id, date, periode)
            )""",
        ):
            try:
                _conn.execute(text(stmt))
                _conn.commit()
            except Exception:
                # Best-effort migration: ignore statements that were already
                # applied (e.g. "duplicate column name").
                pass
    return engine
|
|
|
|
|
|
def upsert_apprenti_fiche(session: Session, apprenti_id: int, data: dict) -> None:
    """Create or update an apprentice's personal data sheet.

    On an existing fiche, only the keys present in *data* are overwritten
    and ``updated_at`` is refreshed; on a new fiche, absent keys default to
    ``None``.
    """
    fields = [
        "adresse", "code_postal", "localite", "telephone", "email",
        "date_naissance", "majeur", "compensation_desavantages",
        "entreprise_nom", "entreprise_adresse", "entreprise_code_postal",
        "entreprise_localite", "entreprise_telephone", "entreprise_email",
        "formateur_nom", "formateur_email",
        "profession",
        "resp_legal_nom", "resp_legal_adresse", "resp_legal_code_postal",
        "resp_legal_localite", "resp_legal_telephone_p", "resp_legal_telephone_n",
    ]
    fiche = session.execute(
        select(ApprentiFiche).where(ApprentiFiche.apprenti_id == apprenti_id)
    ).scalar_one_or_none()
    if fiche is None:
        values = {name: data.get(name) for name in fields}
        session.add(ApprentiFiche(apprenti_id=apprenti_id, **values))
        return
    for name in fields:
        if name in data:
            setattr(fiche, name, data[name])
    fiche.updated_at = datetime.now()
|
|
|
|
|
|
def upsert_escada_pending(
    session: Session, apprenti_id: int, d: "date", periode: int, action: str
) -> None:
    """Queue (or refresh) a pending Escada change for one absence slot.

    If a row already exists for (apprenti_id, d, periode) its action is
    replaced and its timestamp refreshed; otherwise a new row is added.
    """
    row = session.execute(
        select(EscadaPending).where(
            EscadaPending.apprenti_id == apprenti_id,
            EscadaPending.date == d,
            EscadaPending.periode == periode,
        )
    ).scalar_one_or_none()
    if row is None:
        session.add(
            EscadaPending(apprenti_id=apprenti_id, date=d, periode=periode, action=action)
        )
    else:
        row.action = action
        row.created_at = datetime.now()
|
|
|
|
|
|
def _norm_prenom(p: str) -> str:
|
|
"""Lowercase + strip accents for prenom comparison."""
|
|
nfkd = unicodedata.normalize("NFKD", p)
|
|
return " ".join(nfkd.encode("ascii", "ignore").decode("ascii").lower().split())
|
|
|
|
|
|
def _prenoms_compatible(a: str, b: str) -> bool:
|
|
"""True si l'un des prénoms est un préfixe-mot de l'autre.
|
|
|
|
'samuel' vs 'samuel nathanael' → True
|
|
'mendes carlos' vs 'mendes carlos david' → True
|
|
'marie' vs 'marie-claude' → False (tiret, pas espace)
|
|
"""
|
|
if not a or not b:
|
|
return False
|
|
short, long = (a, b) if len(a) <= len(b) else (b, a)
|
|
return long == short or long.startswith(short + " ")
|
|
|
|
|
|
def find_or_create_apprenti(
    session: Session, nom: str, prenom: str, classe: str
) -> "Apprenti":
    """Find or create an Apprenti, deduplicating on the first name.

    Lookup strategy:
      1. Exact match on nom+prenom+classe.
      2. Otherwise, among apprentices sharing nom+classe, take the one whose
         first name is compatible (one is a word-prefix of the other).
         Merging happens only when exactly one candidate matches.
      3. Otherwise, create a new Apprenti.

    Guard rails: creation is refused (ValueError) for an empty class —
    avoiding orphan rows when the PDF header could not be parsed — and for
    MP/MI classes. MP classes only serve Matu matching (lookup by name in a
    regular class); MI classes are ignored entirely. The caller
    (importer.py) is expected to have filtered those out beforehand.
    """
    if not classe or not classe.strip():
        raise ValueError(
            f"Création d'apprenti refusée : classe vide pour {nom!r} {prenom!r}. "
            f"Vérifier le PDF source (header de page incomplet)."
        )
    if classe.startswith(("MP", "MI")):
        raise ValueError(
            f"Création d'apprenti refusée pour la classe '{classe}' "
            f"(MP/MI réservées au matching Matu via classes régulières)."
        )

    # Step 1: exact match.
    exact = session.execute(
        select(Apprenti).where(
            Apprenti.nom == nom,
            Apprenti.prenom == prenom,
            Apprenti.classe == classe,
        )
    ).scalar_one_or_none()
    if exact is not None:
        return exact

    # Step 2: fuzzy first-name match among apprentices of same nom+classe.
    same_name = session.execute(
        select(Apprenti).where(
            Apprenti.nom == nom,
            Apprenti.classe == classe,
        )
    ).scalars().all()
    target = _norm_prenom(prenom)
    compatible = [
        ap for ap in same_name
        if _prenoms_compatible(target, _norm_prenom(ap.prenom))
    ]
    if len(compatible) == 1:
        return compatible[0]

    # Step 3: create a new row; flush so the caller gets an id immediately.
    nouveau = Apprenti(nom=nom, prenom=prenom, classe=classe)
    session.add(nouveau)
    session.flush()
    return nouveau
|
|
|
|
|
|
def get_session(db_url: str | None = None) -> Session:
    """Return a new Session bound to a fresh engine for *db_url*."""
    factory = sessionmaker(bind=get_engine(db_url))
    return factory()
|
|
|
|
|
|
if __name__ == "__main__":
    # CLI entry point: `python -m src.db init` creates/migrates the database.
    # Fix: the engine returned by init_db() was bound to an unused local.
    if len(sys.argv) > 1 and sys.argv[1] == "init":
        init_db()
        print(f"Base initialisée : {DB_PATH}")
    else:
        print("Usage : python -m src.db init")
|