#!/usr/bin/env python3
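"""Import order lines from a remote CSV export into MySQL.

Downloads SW_HISTO_ORDER_LINES_2665.CSV over SFTP, parses it, skips lines that
are already present in `order_lines` or whose order is missing from `orders`,
and inserts the remaining rows in batches of BATCH_SIZE.

Dependencies: paramiko, mysql-connector-python.
"""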
import os
import csv
import tempfile
import paramiko
import mysql.connector
from datetime import datetime

# ----- SFTP config -----
SFTP_HOST       = "emde.autarcia.com"
SFTP_PORT       = 22
SFTP_USERNAME   = "ftp-shoppingfeed"
SFTP_PASSWORD   = "5vm_*2I[f2yL2A6J!/dE"
REMOTE_CSV_PATH = "/exports/CSV/exp_swing/SW_HISTO_ORDER_LINES_2665.CSV"

# ----- Database config -----
DB_HOST    = 'fz321.myd.infomaniak.com'
DB_NAME    = 'fz321_emde_b2b'
DB_USER    = 'fz321_emde_ref'
DB_PASS    = '4eDCSYh#a612_%'
BATCH_SIZE = 1000  # Number of rows per insert batch
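
# A minimal alternative to hard-coding the secrets above (the environment
# variable names here are only illustrative, not defined elsewhere) would be:
#   SFTP_PASSWORD = os.environ.get("SFTP_PASSWORD", SFTP_PASSWORD)
#   DB_PASS       = os.environ.get("DB_PASS", DB_PASS)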

def download_csv_via_sftp(remote_path: str) -> str:
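    """Download the remote file over SFTP to a temporary local file and return its path."""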
    tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".csv")
    local_path = tmp.name
    tmp.close()

    transport = paramiko.Transport((SFTP_HOST, SFTP_PORT))
    try:
        transport.connect(username=SFTP_USERNAME, password=SFTP_PASSWORD)
        sftp = paramiko.SFTPClient.from_transport(transport)
        try:
            sftp.get(remote_path, local_path)
            print(f"✅ CSV downloaded to {local_path}")
        finally:
            sftp.close()
    finally:
        # Close the transport even if the connection or download fails.
        transport.close()

    return local_path

def parse_date(dt_str: str) -> str:
    """Convertit 'DD/MM/YYYY' en 'YYYY-MM-DD'."""
    return datetime.strptime(dt_str.strip(), "%d/%m/%Y").strftime("%Y-%m-%d")

def to_int(value: str) -> int:
    """Convertit une chaîne avec virgule en int (tronque)."""
    if not value:
        return 0
    return int(float(value.replace(',', '.')))

def to_float(value: str) -> float:
    """Convertit une chaîne avec virgule en float."""
    if not value:
        return 0.0
    return float(value.replace(',', '.'))

def import_order_lines(csv_path: str):
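    """Parse the CSV at csv_path and batch-insert the new, valid lines into order_lines."""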
    conn = mysql.connector.connect(
        host=DB_HOST, user=DB_USER, password=DB_PASS,
        database=DB_NAME, charset='utf8mb4'
    )
    cursor = conn.cursor()

    insert_sql = """
      INSERT INTO order_lines
        (CLC_CLC_ID, CLC_PDT_ID, CLC_CLC_QTE_UB, CLC_CLC_QTE_LIVREE,
         CLC_CLC_CMD_ID, CLC_CLC_MONTANT_HT, CLC_CLC_PRIX_UNIT_NET,
         CLC_CLC_PRIX_R1, CLC_CLC_PRIX_R2, CLC_CLC_PRIX_R3,
         CLC_DATE_DEMANDE, ARTICLE_NON_LIVRE, CLC_CLC_ETAT_ID)
      VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
    """

    # 1) Load and parse the entire CSV
    all_rows = []
    with open(csv_path, newline='', encoding='utf-8') as f:
        reader = csv.reader(f, delimiter=';')
        next(reader, None)  # skip the header row; None avoids StopIteration on an empty file
        for row in reader:
            (
              clc_id, pdt_id, qte_ub, qte_livree, cmd_id,
              montant, unitnet, r1, r2, r3,
              date_demande, article_non_livre, etat_id
            ) = [v.strip().strip('"') for v in row]

            all_rows.append({
                "clc_id":           to_int(clc_id),
                "pdt_id":           to_int(pdt_id),
                "qte_ub":           to_int(qte_ub),
                "qte_livree":       to_int(qte_livree),
                "cmd_id":           to_int(cmd_id),
                "mont_ht":          to_float(montant),
                "unit_net":         to_float(unitnet),
                "r1":               to_float(r1),
                "r2":               to_float(r2),
                "r3":               to_float(r3),
                "date_demande":     parse_date(date_demande),
                "article_non_livre": to_int(article_non_livre),
                "etat_id":          etat_id
            })

    if not all_rows:
        print("⚠️ Aucun enregistrement trouvé dans le CSV.")
        cursor.close()
        conn.close()
        return

    # 2) Fetch the order-line IDs already present in order_lines
    ids = [r["clc_id"] for r in all_rows]
    existing_ids = set()
    CHUNK = 500
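    # Query in chunks of CHUNK IDs so the IN (...) clause stays a bounded size.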
    for i in range(0, len(ids), CHUNK):
        chunk = ids[i:i+CHUNK]
        sql = f"SELECT CLC_CLC_ID FROM order_lines WHERE CLC_CLC_ID IN ({','.join(['%s']*len(chunk))})"
        cursor.execute(sql, chunk)
        existing_ids.update(r[0] for r in cursor.fetchall())

    # New step: load every existing CMD_CMD_ID from orders
    cursor.execute("SELECT CMD_CMD_ID FROM orders")
    existing_cmd_ids = set(r[0] for r in cursor.fetchall())

    # 3) Filter: keep only lines not yet imported
    #    AND whose parent order exists in the database
    new_data = []
    for r in all_rows:
        if r["clc_id"] in existing_ids:
            continue
        if r["cmd_id"] not in existing_cmd_ids:
            # log the orphan line and skip it
            print(f"⚠️ Line {r['clc_id']} references order {r['cmd_id']} not found in orders, skipping")
            continue
        new_data.append((
            r["clc_id"], r["pdt_id"], r["qte_ub"], r["qte_livree"],
            r["cmd_id"], r["mont_ht"], r["unit_net"],
            r["r1"], r["r2"], r["r3"],
            r["date_demande"], r["article_non_livre"], r["etat_id"]
        ))

    if not new_data:
        print("ℹ️ Aucune nouvelle ligne valide à insérer.")
        cursor.close()
        conn.close()
        return

    # 4) Insert in batches
    total    = len(new_data)
    inserted = 0
    for i in range(0, total, BATCH_SIZE):
        batch = new_data[i:i+BATCH_SIZE]
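        # executemany sends the whole batch in one call; cursor.rowcount then
        # reports how many rows that call affected.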
        cursor.executemany(insert_sql, batch)
        conn.commit()
        inserted += cursor.rowcount
        print(f"➕ Batch inséré : lignes {i} → {i+len(batch)-1}")

    print(f"✅ Import terminé, {inserted} nouvelles lignes insérées.")
    cursor.close()
    conn.close()

if __name__ == "__main__":
    local_csv = download_csv_via_sftp(REMOTE_CSV_PATH)
    try:
        import_order_lines(local_csv)
    finally:
        os.remove(local_csv)
