import csv
import json
import mimetypes
import os
import time
import uuid
from collections import defaultdict
from urllib.parse import urlparse

import requests

# ---- TOKENS (kept intact) ----
# SECURITY NOTE(review): live credentials are hard-coded in source. These
# should be loaded from environment variables or a secrets manager, and the
# tokens rotated if this file has ever been shared or committed.
ACCESS_TOKEN_SANDBOX = "EAAAl60tZLgxdxhvD2Jdh7dRXcJz8nSsvcBW6g9rlXDVof6xFrXWMd29UCbOYmyW"
# NOTE(review): ACCESS_TOKEN is not referenced anywhere in this file's visible
# code (HEADERS_NEW below uses the sandbox token) — confirm whether it is
# still needed.
ACCESS_TOKEN = "EAAAlzTxcLpe46hddBlJOn5W7AhlOS75IQoVrilB1LbXfO5qH0RlKZj4ceXQN"

# ---- CONFIG ----
BASE_URL = "https://connect.squareup.com/v2"  # production Connect host
SQUARE_VERSION = "2026-01-22"  # pinned Square-Version API header

INPUT_MAP_CSV = "square_image_relink_map.csv"   # produced by the mapper
OUT_RESULTS_CSV = "square_image_upload_results.csv"  # per-row outcome report

DOWNLOAD_DIR = "./_tmp_images"  # local scratch directory for downloaded images
REQUEST_TIMEOUT_S = 60  # per-request timeout (seconds) for downloads and uploads
SLEEP_BETWEEN_UPLOADS_S = 0.25  # mild throttling between Square uploads

# Set to True for a safe dry run that downloads images but DOES NOT upload to Square
DRY_RUN = False

# Headers sent on every Square API call.
HEADERS_NEW = {
    # NOTE(review): the SANDBOX token is sent to the production BASE_URL above;
    # Square sandbox normally uses connect.squareupsandbox.com — confirm this
    # pairing is intended before running against real data.
    "Authorization": f"Bearer {ACCESS_TOKEN_SANDBOX}",
    "Square-Version": SQUARE_VERSION,
    "Accept": "application/json",
    # Don't set Content-Type here; requests will set multipart boundary automatically
}


def safe_filename(s: str) -> str:
    """Sanitize *s* into a filesystem-safe name.

    Every character that is not alphanumeric or one of ``- _ .`` is replaced
    with an underscore, and the result is capped at 180 characters.
    """
    keep = {"-", "_", "."}
    out = []
    for ch in s:
        out.append(ch if ch.isalnum() or ch in keep else "_")
    return "".join(out)[:180]


def infer_mime_and_ext(url: str, content_type: str | None):
    # Prefer server Content-Type if it's an image
    if content_type and content_type.startswith("image/"):
        mime = content_type.split(";")[0].strip()
        ext = mimetypes.guess_extension(mime) or ""
        return mime, ext

    # Fallback to URL extension
    path = urlparse(url).path
    _, ext = os.path.splitext(path)
    ext = ext.lower()
    mime = mimetypes.types_map.get(ext, "application/octet-stream")
    return mime, ext


def download_image(url: str, dest_dir: str, name_hint: str):
    """Download *url* into *dest_dir*, naming the file from *name_hint*.

    Args:
        url: HTTP(S) image URL to fetch (redirects are followed).
        dest_dir: directory to write into; created if missing.
        name_hint: basis for the filename (sanitized; extension appended).

    Returns:
        Tuple of (local file path, inferred mime type, size in bytes,
        raw server ``Content-Type`` header or ``None``).

    Raises:
        requests.HTTPError: on a non-2xx response.
    """
    os.makedirs(dest_dir, exist_ok=True)

    # Stream the body so large images are never held fully in memory, and
    # close the connection deterministically via the context manager (the
    # original buffered r.content and relied on GC to release the socket).
    with requests.get(url, timeout=REQUEST_TIMEOUT_S, allow_redirects=True, stream=True) as r:
        r.raise_for_status()

        content_type = r.headers.get("Content-Type")
        mime, ext = infer_mime_and_ext(url, content_type)

        if not ext:
            # Last resort: derive an extension from the mime type.
            ext = mimetypes.guess_extension(mime) or ".img"

        filename = safe_filename(name_hint) + ext
        path = os.path.join(dest_dir, filename)

        size = 0
        with open(path, "wb") as f:
            for chunk in r.iter_content(chunk_size=65536):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
                    size += len(chunk)

    return path, mime, size, content_type


def create_and_attach_image(new_item_id: str, image_path: str, mime: str, caption: str, is_primary: bool):
    """Upload a local image via Square's CreateCatalogImage and attach it to an item.

    The endpoint expects multipart/form-data with two parts:
      - ``file``: the image bytes
      - ``request``: a JSON document describing the CatalogImage

    Args:
        new_item_id: Square catalog object id of the item to attach to.
        image_path: local path of the image file to upload.
        mime: mime type reported for the ``file`` part.
        caption: image caption (truncated to 1000 chars).
        is_primary: whether Square should treat this as the item's primary image.

    Returns:
        Tuple of (ok, new_image_id, payload): ``ok`` is False on HTTP >= 400,
        ``new_image_id`` is the created CatalogImage id (or None), ``payload``
        is the parsed JSON body (or ``{"raw_text": ...}`` if not JSON).
    """
    req_body = {
        # Fresh key per call; a retry of the same logical upload should reuse one.
        "idempotency_key": str(uuid.uuid4()),
        "object_id": new_item_id,  # attach to the item during upload
        "image": {
            "id": "#TEMP_ID",  # client-side placeholder id required by the API
            "type": "IMAGE",
            "image_data": {
                "caption": caption[:1000] if caption else ""
            }
        },
        "is_primary": bool(is_primary)
    }

    with open(image_path, "rb") as f:
        resp = requests.post(
            f"{BASE_URL}/catalog/images",
            headers=HEADERS_NEW,
            files={"file": (os.path.basename(image_path), f, mime)},
            data={"request": json.dumps(req_body)},
            timeout=REQUEST_TIMEOUT_S,
        )

    # Square returns JSON with either "image" or "errors". JSON decode
    # failures raise a ValueError subclass, so catch only that (the original
    # bare `except Exception` could mask unrelated bugs).
    try:
        payload = resp.json()
    except ValueError:
        payload = {"raw_text": resp.text}

    if resp.status_code >= 400:
        return False, None, payload

    image_obj = payload.get("image") or {}
    new_image_id = image_obj.get("id")
    return True, new_image_id, payload


def _result_row(status, new_item_id, item_name, old_image_id, image_url,
                download_path="", mime="", square_new_image_id="", error=""):
    """Build one results-CSV row; centralizes the column set used everywhere."""
    return {
        "status": status,
        "new_item_id": new_item_id,
        "item_name": item_name,
        "old_image_id": old_image_id,
        "image_url": image_url,
        "download_path": download_path,
        "mime": mime,
        "square_new_image_id": square_new_image_id,
        "error": error,
    }


def _load_matched_rows(path):
    """Read the mapper CSV and keep only rows whose match_status is actionable."""
    rows = []
    with open(path, "r", newline="", encoding="utf-8") as f:
        for row in csv.DictReader(f):
            if row.get("match_status") in ("MATCHED", "AMBIGUOUS_NAME_MULTIPLE_NEW_ITEMS"):
                rows.append(row)
    return rows


def _write_results(path, results):
    """Write the per-row outcome report as CSV."""
    fieldnames = [
        "status",
        "new_item_id",
        "item_name",
        "old_image_id",
        "image_url",
        "download_path",
        "mime",
        "square_new_image_id",
        "error",
    ]
    with open(path, "w", newline="", encoding="utf-8") as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(results)


def main():
    """Download each mapped image and upload/attach it to its new Square item.

    Reads INPUT_MAP_CSV, groups rows by new_item_id, downloads every image,
    then (unless DRY_RUN) uploads each via create_and_attach_image, marking
    the first image per item as primary. Writes OUT_RESULTS_CSV at the end.
    """
    # 1) Read map CSV (only rows worth attempting)
    rows = _load_matched_rows(INPUT_MAP_CSV)
    if not rows:
        print(f"No rows to process from {INPUT_MAP_CSV}")
        return

    # 2) Group by new_item_id
    by_item = defaultdict(list)
    for r in rows:
        new_item_id = (r.get("new_item_id") or "").strip()
        if not new_item_id:
            continue
        by_item[new_item_id].append(r)

    print(f"Will process {len(by_item)} new items, {len(rows)} image rows total")
    os.makedirs(DOWNLOAD_DIR, exist_ok=True)

    # 3) Upload + attach
    results = []
    total = 0
    ok_count = 0

    for new_item_id, item_rows in by_item.items():
        # Stable order so which image becomes "primary" (index 0) is deterministic.
        item_rows_sorted = sorted(item_rows, key=lambda x: (x.get("image_id") or "", x.get("image_url") or ""))

        for idx, r in enumerate(item_rows_sorted):
            total += 1
            item_name = (r.get("item_name") or "").strip()
            image_url = (r.get("image_url") or "").strip()
            old_image_id = (r.get("image_id") or "").strip()

            if not image_url:
                results.append(_result_row(
                    "SKIP_NO_URL", new_item_id, item_name, old_image_id, image_url,
                    error="missing image_url"))
                continue

            # Download first (always, even in dry runs)
            try:
                name_hint = f"{item_name}_{new_item_id}_{old_image_id or idx}"
                img_path, mime, _size_b, _server_ct = download_image(image_url, DOWNLOAD_DIR, name_hint)
            except Exception as e:
                results.append(_result_row(
                    "DOWNLOAD_FAIL", new_item_id, item_name, old_image_id, image_url,
                    error=str(e)))
                continue

            if DRY_RUN:
                results.append(_result_row(
                    "DRY_RUN_DOWNLOADED", new_item_id, item_name, old_image_id, image_url,
                    download_path=img_path, mime=mime))
                continue

            # Upload + attach
            try:
                is_primary = (idx == 0)
                caption = item_name or "Migrated image"
                ok, new_image_id, payload = create_and_attach_image(new_item_id, img_path, mime, caption, is_primary)

                if ok:
                    ok_count += 1
                    results.append(_result_row(
                        "UPLOADED_ATTACHED", new_item_id, item_name, old_image_id, image_url,
                        download_path=img_path, mime=mime,
                        square_new_image_id=new_image_id or ""))
                else:
                    results.append(_result_row(
                        "UPLOAD_FAIL", new_item_id, item_name, old_image_id, image_url,
                        download_path=img_path, mime=mime,
                        error=json.dumps(payload)[:4000]))
            except Exception as e:
                results.append(_result_row(
                    "UPLOAD_EXCEPTION", new_item_id, item_name, old_image_id, image_url,
                    download_path=img_path, mime=mime, error=str(e)))

            # Throttle only actual upload attempts (skips/dry-runs `continue` above).
            time.sleep(SLEEP_BETWEEN_UPLOADS_S)

    # 4) Write results CSV
    _write_results(OUT_RESULTS_CSV, results)

    print(f"Done. Success: {ok_count}/{total}")
    print(f"Wrote: {OUT_RESULTS_CSV}")
    if DRY_RUN:
        print("DRY_RUN=True so nothing was uploaded; only downloaded.")


# Script entry point: run the migration only when executed directly,
# not when imported.
if __name__ == "__main__":
    main()