#!/usr/bin/env python3
import os
import csv
import tempfile
import paramiko
import mysql.connector

# ————— SFTP config —————
SFTP_HOST       = "emde.autarcia.com"
SFTP_PORT       = 22
SFTP_USERNAME   = "ftp-shoppingfeed"
SFTP_PASSWORD   = "5vm_*2I[f2yL2A6J!/dE"
REMOTE_CSV_PATH = "/exports/CSV/exp_swing/SW_DATA_PRODUCT1_2694.CSV"

# ————— Database config —————
DB_HOST    = 'fz321.myd.infomaniak.com'
DB_NAME    = 'fz321_emde_b2b'
DB_USER    = 'fz321_emde_ref'
DB_PASS    = '4eDCSYh#a612_%'

# Number of rows per executemany batch
BATCH_SIZE = 1000
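
# Optional override (a minimal sketch, not required by the rest of the script): read
# the secrets from environment variables so they do not have to live in the source.
# The EMDE_SFTP_PASSWORD / EMDE_DB_PASS names are assumptions, not an existing
# convention; the hard-coded values above stay as the fallback.
SFTP_PASSWORD = os.environ.get("EMDE_SFTP_PASSWORD", SFTP_PASSWORD)
DB_PASS       = os.environ.get("EMDE_DB_PASS", DB_PASS)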

def download_csv_via_sftp(remote_path: str) -> str:
    """Télécharge le CSV SFTP en local et retourne son chemin."""
    tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".csv")
    local_path = tmp.name
    tmp.close()

    transport = paramiko.Transport((SFTP_HOST, SFTP_PORT))
    transport.connect(username=SFTP_USERNAME, password=SFTP_PASSWORD)
    sftp = paramiko.SFTPClient.from_transport(transport)
    try:
        sftp.get(remote_path, local_path)
        print(f"✅ CSV téléchargé dans {local_path}")
    finally:
        sftp.close()
        transport.close()

    return local_path
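
# Variant sketch: paramiko.Transport above does not verify the server's host key.
# This alternative uses SSHClient with the local known_hosts file instead; it assumes
# the SFTP host is already present in known_hosts and is not called by the main flow.
def download_csv_via_sftp_verified(remote_path: str) -> str:
    """Download the CSV over SFTP while verifying the server's host key."""
    tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".csv")
    local_path = tmp.name
    tmp.close()

    client = paramiko.SSHClient()
    client.load_system_host_keys()  # reads ~/.ssh/known_hosts
    client.set_missing_host_key_policy(paramiko.RejectPolicy())  # refuse unknown hosts
    try:
        client.connect(SFTP_HOST, port=SFTP_PORT,
                       username=SFTP_USERNAME, password=SFTP_PASSWORD)
        sftp = client.open_sftp()
        sftp.get(remote_path, local_path)
        print(f"✅ CSV downloaded to {local_path}")
    finally:
        client.close()

    return local_path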

def import_products(csv_path: str):
    """Parse et upsert les produits depuis le CSV."""
    conn = mysql.connector.connect(
        host=DB_HOST, user=DB_USER, password=DB_PASS,
        database=DB_NAME, charset='utf8mb4'
    )
    cursor = conn.cursor()
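    # Note: ON DUPLICATE KEY UPDATE only takes effect when the inserted row collides
    # with a PRIMARY KEY or UNIQUE index (presumably PDT_PDT_ID here); without such a
    # key, every row is inserted as new.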

    upsert_sql = """
    INSERT INTO products
      (PDT_PDT_ID, PDT_PDT_REFERENCE, PDT_PDT_DESIGNATION)
    VALUES (%s, %s, %s)
    ON DUPLICATE KEY UPDATE
      PDT_PDT_REFERENCE   = VALUES(PDT_PDT_REFERENCE),
      PDT_PDT_DESIGNATION = VALUES(PDT_PDT_DESIGNATION)
    """

    rows = []
    with open(csv_path, newline='', encoding='utf-8') as f:
        reader = csv.reader(f, delimiter=';')
        next(reader)  # skip the header row
        for row in reader:
            # [ "REF","ID","DESIGNATION", ... ]
            ref, id_, desig, *rest = [v.strip().strip('"') for v in row]
            try:
                pid = int(id_)
            except ValueError:
                print(f"⚠️  ID invalide, ligne ignorée : {row}")
                continue
            rows.append((pid, ref or None, desig or None))

    try:
        if not rows:
            print("ℹ️ No valid records to import.")
            return

        # Execute in batches, committing after each one
        total = len(rows)
        for i in range(0, total, BATCH_SIZE):
            batch = rows[i:i + BATCH_SIZE]
            cursor.executemany(upsert_sql, batch)
            conn.commit()
            print(f"✅ Batch {i // BATCH_SIZE + 1} imported ({len(batch)} rows)")

        print(f"✅ Import finished: {total} rows processed.")
    finally:
        cursor.close()
        conn.close()
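
# Optional helper sketch (not called from the main flow): a quick post-import sanity
# check that counts the rows currently in the products table, using the same
# connection settings as import_products.
def count_products() -> int:
    """Return the number of rows currently in the products table."""
    conn = mysql.connector.connect(
        host=DB_HOST, user=DB_USER, password=DB_PASS,
        database=DB_NAME, charset='utf8mb4'
    )
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM products")
        (count,) = cursor.fetchone()
        cursor.close()
        return count
    finally:
        conn.close()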

if __name__ == "__main__":
    local_csv = download_csv_via_sftp(REMOTE_CSV_PATH)
    try:
        import_products(local_csv)
    finally:
        os.remove(local_csv)
