"""
APS File Upload & Translation module.
Upload BIM files (Revit, IFC, DWG, ZIP) to APS and translate to SVF2 web format.
"""

import base64
import json
import os
import sys
import time
import zipfile
from pathlib import Path
from urllib.parse import quote

import requests

from .auth import get_2legged_token

BASE_URL = "https://developer.api.autodesk.com"
CHUNK_SIZE = 20 * 1024 * 1024  # 20 MB per chunk


def create_bucket(bucket_key=None, token=None):
    """
    Ensure an OSS bucket exists for storing BIM files.

    Issues a POST to the OSS v2 buckets endpoint with a "persistent"
    retention policy; a 409 Conflict response (bucket already exists)
    is treated as success.

    Args:
        bucket_key: Bucket name (default: env APS_BUCKET_KEY)
        token: Access token (auto-fetched if not provided)

    Returns:
        bool: True if bucket exists or was created

    Raises:
        requests.HTTPError: On any error response other than 409.
    """
    if bucket_key is None:
        bucket_key = os.getenv("APS_BUCKET_KEY", "dthub-bim-demo")
    if token is None:
        token = get_2legged_token(
            scopes="data:read data:write data:create bucket:create bucket:read"
        )

    response = requests.post(
        f"{BASE_URL}/oss/v2/buckets",
        headers={
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
        json={"bucketKey": bucket_key, "policyKey": "persistent"},
    )
    # 409 means the bucket already exists -- that is fine for our purposes.
    if response.status_code != 409:
        response.raise_for_status()
    return True


def upload_file(file_path, bucket_key=None, token=None, on_progress=None):
    """
    Upload a file to an APS bucket via the signed-S3-upload workflow.

    The file is split into CHUNK_SIZE parts; signed URLs are requested in
    batches of 25, each part is PUT directly to S3 (with up to 3 attempts
    per part), and the upload session is finalized to obtain the objectId.

    Args:
        file_path: Path to the file to upload
        bucket_key: Target bucket (default: env APS_BUCKET_KEY)
        token: Access token (auto-fetched if not provided)
        on_progress: Optional callback(part_num, total_parts, percent)

    Returns:
        str: URL-safe, unpadded base64-encoded URN, or None if a part
        could not be uploaded after all retries.

    Raises:
        FileNotFoundError: If file_path does not exist.
        requests.HTTPError: If a signing or completion request fails.
    """
    bucket_key = bucket_key or os.getenv("APS_BUCKET_KEY", "dthub-bim-demo")
    token = token or get_2legged_token(
        scopes="data:read data:write data:create bucket:create bucket:read"
    )

    file_path = Path(file_path)
    if not file_path.exists():
        raise FileNotFoundError(f"File not found: {file_path}")

    file_size = file_path.stat().st_size
    object_key = file_path.name
    total_parts = max(1, (file_size + CHUNK_SIZE - 1) // CHUNK_SIZE)
    batch_size = 25

    # Percent-encode the object key so filenames containing spaces, '#',
    # '%', etc. still form a valid endpoint path.
    sign_url = (
        f"{BASE_URL}/oss/v2/buckets/{bucket_key}/objects/"
        f"{quote(object_key, safe='')}/signeds3upload"
    )
    headers = {"Authorization": f"Bearer {token}"}

    # The first signing request also opens the upload session (uploadKey).
    resp = requests.get(
        sign_url,
        headers=headers,
        params={"parts": min(batch_size, total_parts), "firstPart": 1},
    )
    resp.raise_for_status()
    sign_data = resp.json()
    upload_key = sign_data["uploadKey"]

    part_index = 0
    with open(file_path, "rb") as f:
        while part_index < total_parts:
            batch_count = min(batch_size, total_parts - part_index)

            # Subsequent batches must reference the session via uploadKey.
            if part_index > 0:
                resp = requests.get(sign_url, headers=headers, params={
                    "parts": batch_count,
                    "firstPart": part_index + 1,
                    "uploadKey": upload_key,
                })
                resp.raise_for_status()
                sign_data = resp.json()

            for j, part_url in enumerate(sign_data["urls"]):
                chunk = f.read(CHUNK_SIZE)
                if not chunk:
                    break
                if not _put_part(part_url, chunk):
                    return None  # Part failed after all retries.

                current = part_index + j + 1
                if on_progress:
                    on_progress(current, total_parts,
                                int(current * 100 / total_parts))

            part_index += batch_count

    # Finalize the session: tells OSS all parts are in place and returns
    # the objectId for the assembled object.
    resp = requests.post(
        sign_url,
        headers={**headers, "Content-Type": "application/json"},
        json={"uploadKey": upload_key},
    )
    resp.raise_for_status()

    object_id = resp.json()["objectId"]
    # Model Derivative expects an unpadded URL-safe base64 URN.
    return base64.urlsafe_b64encode(object_id.encode()).decode().rstrip("=")


def _put_part(part_url, chunk, attempts=3, backoff=5):
    """PUT one chunk to its signed S3 URL; return True on HTTP 200/201.

    Retries up to `attempts` times, sleeping `backoff` seconds between
    attempts that raised a connection-level error.
    """
    for attempt in range(attempts):
        try:
            resp = requests.put(
                part_url,
                data=chunk,
                headers={"Content-Type": "application/octet-stream"},
                timeout=600,
            )
            if resp.status_code in (200, 201):
                return True
        except requests.exceptions.RequestException:
            if attempt < attempts - 1:
                time.sleep(backoff)
    return False


def translate_model(urn, token=None, output_format="svf2", views=None):
    """
    Start a Model Derivative job to convert a BIM model to a web format.

    Args:
        urn: Base64-encoded URN from upload
        token: Access token (auto-fetched if not provided)
        output_format: "svf2" (default) or "svf"
        views: List of views, e.g. ["2d", "3d"]

    Returns:
        str: URN of the translation job

    Raises:
        requests.HTTPError: If the job submission fails.
    """
    if token is None:
        token = get_2legged_token(
            scopes="data:read data:write data:create bucket:create bucket:read"
        )
    if views is None:
        views = ["2d", "3d"]

    job_payload = {
        "input": {"urn": urn},
        "output": {"formats": [{"type": output_format, "views": views}]},
    }
    response = requests.post(
        f"{BASE_URL}/modelderivative/v2/designdata/job",
        headers={
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
        json=job_payload,
    )
    response.raise_for_status()
    return urn


def translate_compressed(urn, root_filename, token=None, output_format="svf2", views=None):
    """
    Start translation for a compressed (ZIP) model with a root filename.

    Same as translate_model, but flags the input as compressed and names
    the main design file inside the archive.

    Args:
        urn: Base64-encoded URN of the uploaded ZIP
        root_filename: Path inside the zip to the main file
        token: Access token (auto-fetched if not provided)
        output_format: "svf2" or "svf"
        views: List of views, e.g. ["2d", "3d"]

    Returns:
        str: URN

    Raises:
        requests.HTTPError: If the job submission fails.
    """
    token = token or get_2legged_token(
        scopes="data:read data:write data:create bucket:create bucket:read"
    )
    # Default matches translate_model for consistency.
    views = views or ["2d", "3d"]

    resp = requests.post(
        f"{BASE_URL}/modelderivative/v2/designdata/job",
        headers={
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
        json={
            "input": {
                "urn": urn,
                # Tells Model Derivative the URN points at a ZIP archive.
                "compressedUrn": True,
                "rootFilename": root_filename,
            },
            "output": {"formats": [{"type": output_format, "views": views}]},
        },
    )
    resp.raise_for_status()
    return urn


def check_translation_status(urn, token=None):
    """
    Check the status of a translation job via the manifest endpoint.

    Args:
        urn: Model URN
        token: Access token (auto-fetched if not provided)

    Returns:
        tuple: (status, progress) e.g. ("success", "100%")

    Raises:
        requests.HTTPError: If the manifest request fails.
    """
    if token is None:
        token = get_2legged_token(
            scopes="data:read data:write data:create bucket:create bucket:read"
        )

    manifest_url = f"{BASE_URL}/modelderivative/v2/designdata/{urn}/manifest"
    response = requests.get(
        manifest_url,
        headers={"Authorization": f"Bearer {token}"},
    )
    response.raise_for_status()

    manifest = response.json()
    status = manifest.get("status", "unknown")
    progress = manifest.get("progress", "0%")
    return status, progress


def wait_for_translation(urn, token=None, timeout=600, poll_interval=10, on_status=None):
    """
    Poll until translation completes, fails, or the timeout expires.

    Args:
        urn: Model URN
        token: Access token (auto-fetched by each status check if None)
        timeout: Maximum wait time in seconds
        poll_interval: Seconds between status checks
        on_status: Optional callback(status, progress)

    Returns:
        bool: True if translation succeeded; False on failure or timeout.
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        status, progress = check_translation_status(urn, token)
        if on_status:
            on_status(status, progress)
        if status == "success":
            return True
        if status == "failed":
            return False
        time.sleep(poll_interval)
    # Ran out of time without a terminal status.
    return False


def detect_root_file(zip_path):
    """
    Auto-detect the main design file inside a ZIP archive.

    Scans the archive listing for known BIM extensions in priority order
    (.rcp, .rvt, .ifc, .nwd), ignoring macOS "__MACOSX" metadata entries.
    When several files share the winning extension, the one with the
    shortest path is returned (heuristic: least-nested entry).

    Args:
        zip_path: Path to the ZIP file

    Returns:
        str or None: Path to the detected root file inside the ZIP
    """
    priority = ('.rcp', '.rvt', '.ifc', '.nwd')
    with zipfile.ZipFile(zip_path, 'r') as archive:
        entries = archive.namelist()

    for ext in priority:
        candidates = sorted(
            (
                name for name in entries
                if name.lower().endswith(ext)
                and not name.startswith('__MACOSX')
            ),
            key=len,
        )
        if candidates:
            return candidates[0]
    return None
