import os
import tempfile
import mysql.connector
import requests
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from webdriver_manager.chrome import ChromeDriverManager

# Site handlers
from site_handlers import (
    harvest, elitepro, zedrics, marias, sundayseven,
    fuelf33d, cleanpreps, mealsbyj, localhealthmarket,
    gourmetgainz, fitfoodie, default_handler
)

# --- Config ---
competitor_sites = [
    {"name": "Harvest Cuisines", "url": "https://www.harvestcuisines.com/s/order#most-popular"},
    {"name": "Elite Pro Meals", "url": "https://elitepromeal.com/shop"},
    {"name": "Zedrics", "url": "https://zedrics.com/collections/all-product-offerings"},
    {"name": "Marias Meal Prep", "url": "https://www.mariasmealpreps.com/online-ordering-1"},
    {"name": "Sunday Seven", "url": "https://sundaysevenmeals.com/menu/"},
    {"name": "Fuel F33d", "url": "https://www.fuelf33dhealthshack.com/order-online"},
    {"name": "Clean Preps", "url": "https://cleanpreps.com/shop/"},
    {"name": "Meals By J", "url": "https://www.mealsbyj.net/s/order#most-popular"},
    {"name": "Local Health Market", "url": "https://localhealthmarket.com/meal-delivery/menu/"},
    {"name": "Gourmet Gainz", "url": "https://forms.gle/7K1hD2ywfspQxCDb8"},
    {"name": "Fit Foodie", "url": "https://fitfoodiesa.com/collections/full-menu"}
]
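# Brand names above must match the keys in scrape_map below; anything missing
# from the map falls through to default_handler. Note the Gourmet Gainz "menu"
# is a Google Form rather than a storefront, so its handler presumably parses
# the form instead of a product grid.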

scrape_map = {
    "Harvest Cuisines": harvest.scrape,
    "Elite Pro Meals": elitepro.scrape,
    "Zedrics": zedrics.scrape,
    "Marias Meal Prep": marias.scrape,
    "Sunday Seven": sundayseven.scrape,
    "Fuel F33d": fuelf33d.scrape,
    "Clean Preps": cleanpreps.scrape,
    "Meals By J": mealsbyj.scrape,
    "Local Health Market": localhealthmarket.scrape,
    "Gourmet Gainz": gourmetgainz.scrape,
    "Fit Foodie": fitfoodie.scrape
}
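# Assumed handler contract (inferred from how items are consumed below):
# scrape(driver, url) returns a list of dicts such as
#   {"name": "Chicken Bowl", "price": "$12.99", "image": "https://..."}
# where "image" is optional and name/price are display strings.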

DEBUG_DIR = os.path.join(os.path.dirname(__file__), 'debug')
os.makedirs(DEBUG_DIR, exist_ok=True)

# --- Notifications ---
def notify(message, urgency=1):
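    """Fire-and-forget POST to the local PHP notification helper.

    The urgency scale is assumed from call sites (1 = informational,
    2 = needs attention); any network failure is logged and swallowed so
    a notification can never abort a scrape run.
    """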
    try:
        payload = {'message': message, 'urgency': urgency}
        requests.post('http://localhost/helpers/notifications.php?action=add', json=payload, timeout=3)
    except Exception as err:
        print(f"[notify] Failed: {err}")

# --- Setup DB ---
def setup_database():
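    """Connect to MySQL and ensure the competitor_products table exists.

    CREATE TABLE IF NOT EXISTS is idempotent, so this is safe on every run.
    Returns (connection, cursor); the caller commits and closes both.
    """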
    # Read credentials from the environment when available; the literals are
    # local-dev fallbacks (the env var names here are this script's own choice).
    conn = mysql.connector.connect(
        host=os.getenv("DB_HOST", "localhost"),
        user=os.getenv("DB_USER", "fitfoodie"),
        password=os.getenv("DB_PASSWORD", "FitFoodie25!"),
        database=os.getenv("DB_NAME", "fitfoodiedb")
    )
    cur = conn.cursor()
    # The composite index backs get_previous_data's per-brand, newest-first
    # lookup; with IF NOT EXISTS it only takes effect when the table is first
    # created.
    cur.execute("""
        CREATE TABLE IF NOT EXISTS competitor_products (
            id INT AUTO_INCREMENT PRIMARY KEY,
            brand VARCHAR(255),
            product_name VARCHAR(255),
            price VARCHAR(50),
            image_url TEXT,
            scraped_at DATETIME DEFAULT CURRENT_TIMESTAMP,
            INDEX idx_brand_scraped (brand, scraped_at)
        )
    """)
    return conn, cur

# --- Setup Browser ---
def setup_browser():
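    """Build a headless Chrome driver with a fresh temporary profile.

    A unique --user-data-dir sidesteps "profile already in use" lock errors
    across runs; webdriver_manager fetches a chromedriver matching the
    installed Chrome.
    """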
    profile = tempfile.mkdtemp()
    options = Options()
    options.add_argument('--headless=new')
    options.add_argument('--disable-gpu')
    options.add_argument('--no-sandbox')
    options.add_argument('--disable-dev-shm-usage')
    options.add_argument('--window-size=1920,1080')
    # A bare "Mozilla/5.0" is an easy bot fingerprint; send a full desktop
    # Chrome UA string instead.
    options.add_argument(
        '--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
        'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
    )
    options.add_argument(f'--user-data-dir={profile}')
    return webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=options)

# --- Get last known items for brand ---
def get_previous_data(cursor, brand):
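    """Map normalized product name -> last recorded price for a brand.

    Only a recent window of rows is scanned (newest first), which
    approximates "what the site looked like on the last run".
    """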
    cursor.execute("""
        SELECT product_name, price FROM competitor_products
        WHERE brand = %s
        ORDER BY scraped_at DESC
        LIMIT 500
    """, (brand,))
    previous = {}
    for name, price in cursor.fetchall():
        if not name:
            continue
        # Rows arrive newest-first; setdefault keeps the most recent price per
        # name instead of letting older rows overwrite it. NULL prices are
        # normalized to '' so .strip() can't blow up.
        previous.setdefault(name.strip().lower(), (price or '').strip())
    return previous

# --- Save scraped data ---
def save_to_db(cursor, brand, items):
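    """Append every scraped item as a new history row.

    Queries are parameterized so scraped text can't inject SQL; committing
    is left to the caller.
    """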
    for item in items:
        cursor.execute("""
            INSERT INTO competitor_products (brand, product_name, price, image_url, scraped_at)
            VALUES (%s, %s, %s, %s, NOW())
        """, (brand, item['name'], item['price'], item.get('image')))

# --- Handle each site ---
def scrape_site(driver, cursor, brand, url):
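    """Scrape one brand, diff against its last known state, and persist.

    Everything runs inside one try block so a single broken site logs an
    error and the run moves on to the next competitor.
    """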
    print(f"\n🔍 Scraping {brand} ({url})")
    try:
        previous_data = get_previous_data(cursor, brand)
        scrape_fn = scrape_map.get(brand, default_handler.scrape)
        items = scrape_fn(driver, url)

        # Screenshot
        safe_name = ''.join(c for c in brand if c.isalnum() or c in (' ', '_')).replace(' ', '_')
        screenshot_path = os.path.join(DEBUG_DIR, f"{safe_name}.png")
        driver.save_screenshot(screenshot_path)
        print(f"📸 Screenshot: {screenshot_path}")

        # Compare
        scraped_names = set()
        new_items, price_changes, removed_items = [], [], []

        for item in items:
            name = (item.get('name') or '').strip()
            price = (item.get('price') or '').strip()
            if not name:
                continue  # skip malformed rows rather than crash the run
            key = name.lower()
            scraped_names.add(key)

            old_price = previous_data.get(key)
            if old_price is None:
                new_items.append(name)
            elif old_price != price:
                price_changes.append((name, old_price, price))

        for old_name in previous_data:
            if old_name not in scraped_names:
                removed_items.append(old_name)

        # Notify
        for name in new_items[:5]:
            notify(f"{brand}: new item '{name}' added", urgency=1)
        for name, old, new in price_changes[:5]:
            notify(f"{brand}: price change on '{name}' from {old} → {new}", urgency=2)
        for name in removed_items[:5]:
            notify(f"{brand}: '{name}' no longer found", urgency=2)

        save_to_db(cursor, brand, items)
        print(f"✅ Saved {len(items)} items from {brand}.")

    except Exception as e:
        print(f"❌ Failed to scrape {brand}: {e}")

# --- Main ---
def main():
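    """Scrape every configured competitor in sequence, committing per site."""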
    db, cursor = setup_database()
    browser = setup_browser()

    try:
        for site in competitor_sites:
            scrape_site(browser, cursor, site['name'], site['url'])
            db.commit()
    finally:
        # Always release the browser and DB handles, even if a scrape dies.
        browser.quit()
        cursor.close()
        db.close()

    print("\n🎉 All scraping completed.\n")

if __name__ == "__main__":
    main()