import requests
from bs4 import BeautifulSoup
import json
import sqlite3
import time

DEALS_URL = "https://www.dealabs.com/search/bons-plans?merchant-id=36&sortBy=temp&hide_expired=true&hide_local=true"
DB_PATH = "deals.sqlite3"
AMAZON_AFF_ID = "mediattoads-21"  # Replace with your own Amazon Associates tag
LIENSO_API_KEY = "3a4b518e9aad307b039ba22b59bbb6df"  # Keep this secret; ideally load it from an environment variable
MAX_DEALS = 5  # Maximum number of deals to process per run


def load_cookies(filename):
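    """Load cookies from a JSON export (e.g. produced by a browser
    cookie-export extension; format assumed from the parsing below).

    Expected shape: a list of objects with "name" and "value" keys,
    e.g. [{"name": "session", "value": "abc123"}, ...].
    """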
    print("Chargement des cookies...")
    with open(filename, 'r') as f:
        cookies_list = json.load(f)
        cookies = {c['name']: c['value'] for c in cookies_list}
    return cookies

def fetch_deals(cookies):
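    """Scrape up to MAX_DEALS deals from the Dealabs search results page.

    Returns a list of dicts with keys 'titre', 'prix', 'lien' and 'image'.
    The raw HTML is also dumped to debug_dealabs.html so the CSS
    selectors can be debugged offline if the page layout changes.
    """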
    print("Récupération des deals...")
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
    }
    r = requests.get(DEALS_URL, cookies=cookies, headers=headers, timeout=15)
    print("HTTP status:", r.status_code)
    soup = BeautifulSoup(r.text, 'html.parser')
    with open("debug_dealabs.html", "w", encoding="utf-8") as f:
        f.write(r.text)
    deals = []
    deal_elements = soup.select('article.thread--deal')
    print(f"DEBUG: {len(deal_elements)} deals trouvés !")
    for i, d in enumerate(deal_elements):
        if i >= MAX_DEALS:
            break
        try:
            # Title
            title_tag = d.select_one('a.thread-title, a.thread-title--list')
            if not title_tag:
                title_tag = d.select_one('a.js-thread-title')
            titre = (title_tag.get('title') or title_tag.get_text(strip=True)) if title_tag else "[no title]"
            # Link (relative Dealabs paths are made absolute below)
            lien = title_tag['href'] if title_tag and title_tag.has_attr('href') else ""
            lien = "https://www.dealabs.com" + lien if lien.startswith("/") else lien
            # Price: embedded in the Vue3 JSON payload attached to the article
            price = None
            vue3_data = d.find('div', {'data-vue3': True})
            if vue3_data:
                vue3 = json.loads(vue3_data['data-vue3'])
                thread = vue3.get('props', {}).get('thread', {})
                price = thread.get('price')
            # Fallback to the HTML price tag (rare)
            if not price:
                price_tag = d.select_one('.thread-price')
                price = price_tag.get_text(strip=True) if price_tag else "N/A"
            # Image (basic approach: first <img> in the article)
            img_tag = d.select_one("img")
            img = img_tag.get("src", "") if img_tag else ""
            deals.append({
                'titre': titre,
                'prix': price,
                'lien': lien,
                'image': img
            })
        except Exception as e:
            print(f"[ERREUR] Parsing d'un deal: {e}")
    print(f"{len(deals)} deal(s) trouvé(s) (maximum = {MAX_DEALS})")
    return deals


def add_amazon_aff_link(url):
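    """Append the affiliate tag to Amazon URLs that do not already have one."""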
    if "amazon." in url:
        if "tag=" in url:
            return url  # already has an affiliate tag
        sep = '&' if '?' in url else '?'
        return url + sep + "tag=" + AMAZON_AFF_ID
    return url

def shorten_url(long_url):
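    """Shorten a URL through the Lienso API; return the long URL on failure.

    Two-step flow: POST /api/links creates the link, then GET
    /api/links/{id} reads back its short_url (the POST response only
    appears to return the link id, not the short URL itself).
    """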
    try:
        payload = {
            "location_url": long_url
        }
        headers = {
            "Authorization": f"Bearer {LIENSO_API_KEY}"
        }
        # Create the link on Lienso
        req = requests.post(
            "https://lienso.fr/api/links",
            data=payload,
            headers=headers,
            timeout=10
        )
        print(f"[Lienso] Status: {req.status_code} / {req.text}")
        if req.status_code in (200, 201):
            resp = req.json()
            lienso_id = resp.get("data", {}).get("id")
            if lienso_id is not None:
                # Fetch the created link's details (incl. short_url)
                req2 = requests.get(
                    f"https://lienso.fr/api/links/{lienso_id}",
                    headers=headers,
                    timeout=10
                )
                print(f"[Lienso GET] Status: {req2.status_code} / {req2.text}")
                if req2.status_code == 200:
                    data = req2.json()
                    return data.get("data", {}).get("short_url") or data.get("data", {}).get("url") or long_url
                else:
                    print(f"[ERREUR] GET Lienso: {req2.status_code} {req2.text}")
        else:
            print(f"[ERREUR] POST Lienso: {req.status_code} {req.text}")
    except Exception as e:
        print(f"[EXCEPTION] Lienso: {e}")
    return long_url


def init_db():
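    """Create the deals table in SQLite if it does not exist yet."""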
    conn = sqlite3.connect(DB_PATH)
    c = conn.cursor()
    c.execute('''CREATE TABLE IF NOT EXISTS deals (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        titre TEXT,
        prix TEXT,
        lien TEXT,
        lien_short TEXT,
        image TEXT,
        expire INTEGER DEFAULT 0,
        ajout TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )''')
    conn.commit()
    conn.close()
    print("Base SQLite OK.")

def store_deal(deal):
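    """Insert a deal, skipping duplicates (keyed on lien_short).

    Returns True if a new row was inserted, False if the deal was
    already in the database.
    """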
    conn = sqlite3.connect(DB_PATH)
    c = conn.cursor()
    c.execute("SELECT id FROM deals WHERE lien_short=?", (deal['lien_short'],))
    if c.fetchone():
        conn.close()
        print(f"DUPLICAT (déjà en BDD): {deal['titre']}")
        return False
    c.execute("INSERT INTO deals (titre, prix, lien, lien_short, image, expire) VALUES (?, ?, ?, ?, ?, 0)",
              (deal['titre'], deal['prix'], deal['lien'], deal['lien_short'], deal['image']))
    conn.commit()
    conn.close()
    print(f"Ajouté à la BDD: {deal['titre']} ({deal['lien_short']})")
    return True

def main():
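    """Load cookies, scrape deals, add affiliate tags, shorten links, store new deals."""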
    print("=== Scraper Deals.py ===")
    cookies = load_cookies("cookies.json")
    init_db()
    deals = fetch_deals(cookies)
    n_ok = 0
    for d in deals:
        d['lien'] = add_amazon_aff_link(d['lien'])
        d['lien_short'] = shorten_url(d['lien'])
        if store_deal(d):
            n_ok += 1
        time.sleep(1.5)  # throttle requests so Lienso is not hit too fast
    print(f"Done: {n_ok} new deal(s) added.")

if __name__ == "__main__":
    main()
