import time
from urllib.parse import urljoin

from bs4 import BeautifulSoup
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

def scrape(driver, base_url):
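    """Scrape name, price, image, and product URL from each card on a
    paginated Shopify-style collection, returning a list of dicts."""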
    items = []

    for page in range(1, 5):  # Zedric’s has 4 pages
        page_url = f"{base_url}?page={page}"
        print(f"🔄 Loading: {page_url}")
        driver.get(page_url)

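        # Wait for at least one product card so we never parse a half-rendered page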
        try:
            WebDriverWait(driver, 15).until(
                EC.presence_of_element_located((By.CSS_SELECTOR, "li.grid__item"))
            )
        except TimeoutException:
            print(f"⚠️ Timeout waiting for products on page {page}. Saving screenshot and skipping.")

            # Build a debug filename tagged with the page number and a Unix timestamp
            filename = f"/var/www/html/scrapers/competitors/debug/screenshot_page_{page}_{int(time.time())}.png"

            # Save the screenshot
            driver.save_screenshot(filename)

            print(f"🖼️ Screenshot saved to screenshots/{filename}")
            continue

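        # Hand the rendered DOM to BeautifulSoup for its CSS-selector API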
        soup = BeautifulSoup(driver.page_source, "html.parser")
        product_cards = soup.select("li.grid__item")

        for card in product_cards:
            title_el = card.select_one("a.full-unstyled-link")
            price_el = card.select_one(".price__container .price-item")
            img_el = card.select_one("img")

            if title_el and price_el:
                items.append({
                    "name": title_el.get_text(strip=True),
                    "price": price_el.get_text(strip=True),
                    "image": img_el["src"] if img_el and img_el.has_attr("src") else None,
                    "url": title_el["href"] if title_el.has_attr("href") else None
                })
            else:
                print("❌ Skipped a product due to missing data.")

        print(f"✅ Found {len(product_cards)} items on page {page}")

    return items
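

# Minimal usage sketch: assumes Selenium 4 (driver resolved by Selenium
# Manager) and a placeholder collection URL; swap in the real target.
if __name__ == "__main__":
    from selenium import webdriver

    options = webdriver.ChromeOptions()
    options.add_argument("--headless=new")  # run Chrome without a window
    driver = webdriver.Chrome(options=options)
    try:
        products = scrape(driver, "https://example.com/collections/all")  # placeholder URL
        print(f"Scraped {len(products)} products total")
    finally:
        driver.quit()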