import sys
import time

# Location of the system-installed chromedriver (e.g. Ubuntu / Colab).
sys.path.insert(0, '/usr/lib/chromium-browser/chromedriver')

from bs4 import BeautifulSoup
# selenium-wire only wraps the `webdriver` entry point; the supporting
# helpers (Options, By, WebDriverWait, expected_conditions) are NOT
# re-exported under `seleniumwire.webdriver.*` and must come from
# selenium itself, or these imports raise ModuleNotFoundError.
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from seleniumwire import webdriver

def interceptor(request):
    """Selenium-wire request interceptor: rewrite outgoing headers so the
    headless browser's traffic resembles a regular desktop Chrome session.

    Args:
        request: the intercepted selenium-wire request object; its
            ``headers`` mapping is modified in place.

    Note: the parameter must be the *request* object — selenium-wire
    invokes ``driver.request_interceptor(request)`` for every request.
    (The original signature named it ``driver`` while the body referenced
    ``request``, raising NameError on every intercepted request.)
    """
    # Add headers a real browser would normally send.
    request.headers["Accept-Language"] = "en-US,en;q=0.9"
    request.headers["Referer"] = "https://www.google.com/"

    # Selenium-wire headers allow duplicate keys, so delete the default
    # values before re-adding the spoofed replacements.
    del request.headers["User-Agent"]
    del request.headers["Sec-Ch-Ua"]
    del request.headers["Sec-Fetch-Site"]
    del request.headers["Accept-Encoding"]

    # Replace the deleted headers with realistic desktop-Chrome values.
    request.headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"
    request.headers["Sec-Ch-Ua"] = "\"Chromium\";v=\"122\", \"Not(A:Brand\";v=\"24\", \"Google Chrome\";v=\"122\""
    request.headers["Sec-Fetch-Site"] = "cross-site"
    request.headers["Accept-Encoding"] = "gzip, deflate, br, zstd"

# Build a headless Chrome session suitable for server environments
# (no GPU, no sandbox — required in most containers/CI boxes).
chrome_options = Options()
for flag in ("--headless", "--disable-gpu", "--no-sandbox"):
    chrome_options.add_argument(flag)

# Launch the browser and attach the header-rewriting interceptor so
# every outgoing request passes through it before hitting the network.
driver = webdriver.Chrome(options=chrome_options)
driver.request_interceptor = interceptor

try:
    # Open the target menu page (anchored to the "most popular" section).
    driver.get("https://www.harvestcuisines.com/s/order#most-popular")

    # Wait (up to 10s) until the pickup toggle is clickable, then click it.
    wait = WebDriverWait(driver, 10)
    pickup_button = wait.until(
        EC.element_to_be_clickable((By.CSS_SELECTOR, "button.m-segment--pickup"))
    )
    pickup_button.click()

    # Give the client-side app 1.5s to re-render the menu after the click.
    time.sleep(1.5)

    # Parse the updated DOM with BeautifulSoup.
    soup = BeautifulSoup(driver.page_source, 'html.parser')

    # Collected results: one {"itemName", "itemPrice"} dict per menu item.
    items = []
    for item_container in soup.select("div.item-container"):
        name_el = item_container.select_one(".item__title")
        price_el = item_container.select_one(".item__price-and-badges")

        # select_one returns None when the element is absent — skip
        # malformed cards instead of crashing with AttributeError.
        if name_el is None or price_el is None:
            continue

        items.append({
            "itemName": name_el.get_text(strip=True),
            "itemPrice": price_el.get_text(strip=True),
        })

    # Print the results.
    for item in items:
        print(f"Item Name: {item['itemName']}, Item Price: {item['itemPrice']}")

finally:
    # Always release the browser, even if scraping failed.
    driver.quit()