import time

from bs4 import BeautifulSoup
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

def scrape(driver, url):
    """Scrape product prices from a Sunday Seven Meals listing page.

    Loads *url* in the given Selenium driver, collects every product link
    found on the page, then visits each product page and extracts the
    displayed price.

    Args:
        driver: An initialized Selenium WebDriver instance.
        url: The listing-page URL to load and scrape.

    Returns:
        list[dict]: One dict per product with keys ``"url"`` and
        ``"price"`` (``"N/A"`` when no price element was found).
    """
    driver.get(url)

    # Step 1: Wait for links to load. The explicit wait polls for up to
    # 15 s, so no additional fixed sleep is needed before it.
    try:
        WebDriverWait(driver, 15).until(
            EC.presence_of_element_located((By.TAG_NAME, 'a'))
        )
        print("✅ Page content loaded.")
    except TimeoutException:
        # Best-effort: keep going — the page may still hold partial content.
        print("⚠️ Timed out waiting for product links.")

    time.sleep(1.5)  # JS render buffer

    soup = BeautifulSoup(driver.page_source, 'html.parser')

    # Step 2: Collect product URLs. Deduplicate with a set, then sort so
    # the crawl order is deterministic across runs.
    base = "https://sundaysevenmeals.com/product/"
    links = sorted({
        a['href'] for a in soup.find_all('a', href=True)
        if a['href'].startswith(base)
    })
    print(f"🔗 Found {len(links)} product links.")

    items = []

    # Step 3: Visit each product page and extract its price.
    for link in links:
        try:
            driver.get(link)

            WebDriverWait(driver, 10).until(
                EC.presence_of_element_located((By.CLASS_NAME, "price"))
            )
            time.sleep(1.0)  # small buffer for client-side rendering to settle

            product_soup = BeautifulSoup(driver.page_source, 'html.parser')
            price_tag = product_soup.select_one('span.price.amount.options bdi')

            items.append({
                "url": link,
                "price": price_tag.get_text(strip=True) if price_tag else "N/A"
            })

        except Exception as e:
            # Deliberately broad: one broken product page must not abort
            # the whole crawl — log it and move on.
            print(f"⚠️ Skipping {link} due to error: {e}")
            continue

    return items