"""URL Inspection API with quota management."""

from __future__ import annotations

import logging

from googleapiclient.errors import HttpError

from src.api.rate_limiter import QuotaTracker, RateLimiter, retry_on_api_error
from src.models.data_models import IndexStatus

logger = logging.getLogger("seo_optimizer")


@retry_on_api_error()
def _inspect_url(service, site_url: str, inspection_url: str) -> dict:
    """Run one URL Inspection API call and return its result payload.

    Args:
        service: Search Console v1 service.
        site_url: GSC property the inspected URL belongs to.
        inspection_url: Fully-qualified URL to inspect.

    Returns:
        The ``inspectionResult`` object from the API response (empty dict
        if the key is absent).
    """
    body = {
        "inspectionUrl": inspection_url,
        "siteUrl": site_url,
    }
    # Retries on transient API errors are handled by the decorator.
    raw = service.urlInspection().index().inspect(body=body).execute()
    return raw.get("inspectionResult", {})


def inspect_url(
    service,
    site_url: str,
    url: str,
    quota_tracker: QuotaTracker | None = None,
    rate_limiter: RateLimiter | None = None,
) -> IndexStatus | None:
    """Inspect a single URL's index status.

    Args:
        service: Search Console v1 service.
        site_url: GSC property URL.
        url: URL to inspect.
        quota_tracker: Optional daily quota tracker.
        rate_limiter: Optional rate limiter.

    Returns:
        IndexStatus or None if quota exhausted.
    """
    # Don't spend a request once the daily allowance is gone.
    if quota_tracker and not quota_tracker.acquire():
        logger.warning("URL Inspection daily quota exhausted")
        return None

    if rate_limiter:
        rate_limiter.acquire(site_url)

    try:
        inspection = _inspect_url(service, site_url, url)
    except HttpError as e:
        logger.error("URL Inspection failed for %s: %s", url, e)
        # Report the failure as an ERROR verdict rather than raising,
        # so batch callers can keep going.
        return IndexStatus(url=url, verdict="ERROR", coverage_state=str(e))

    index_result = inspection.get("indexStatusResult", {})
    field = index_result.get

    verdict = field("verdict", "")
    indexing_state = field("indexingState", "")

    return IndexStatus(
        url=url,
        verdict=verdict,
        coverage_state=field("coverageState", ""),
        indexing_state=indexing_state,
        page_fetch_state=field("pageFetchState", ""),
        robots_txt_state=field("robotsTxtState", ""),
        last_crawl_time=field("lastCrawlTime", ""),
        # Indexed only when the inspection passed AND indexing is allowed.
        is_indexed=(verdict == "PASS" and indexing_state == "INDEXING_ALLOWED"),
    )


def batch_inspect(
    service,
    site_url: str,
    urls: list[str],
    quota_tracker: QuotaTracker | None = None,
    rate_limiter: RateLimiter | None = None,
) -> list[IndexStatus]:
    """Inspect multiple URLs with progress logging.

    Stops early (returning a shorter list) if the daily quota runs out.

    Args:
        service: Search Console v1 service.
        site_url: GSC property URL.
        urls: List of URLs to inspect.
        quota_tracker: Optional daily quota tracker.
        rate_limiter: Optional rate limiter.

    Returns:
        List of IndexStatus results (may be shorter than urls if quota runs out).
    """
    results: list[IndexStatus] = []
    total = len(urls)

    for i, url in enumerate(urls, 1):
        status = inspect_url(
            service, site_url, url,
            quota_tracker=quota_tracker,
            rate_limiter=rate_limiter,
        )

        # inspect_url returns None only when the daily quota is exhausted.
        if status is None:
            logger.warning(
                "Stopping batch inspection at %d/%d — quota exhausted", i, total,
            )
            break

        results.append(status)

        # Log every 10 URLs and at the end of the batch.
        if i % 10 == 0 or i == total:
            remaining = quota_tracker.remaining if quota_tracker else "N/A"
            logger.info(
                "URL Inspection progress: %d/%d (quota remaining: %s)",
                i, total, remaining,
            )

    indexed = sum(1 for r in results if r.is_indexed)
    # BUGFIX: previous summary passed len(results) for both the ratio
    # denominator and the "checked" count, hiding the requested total when
    # the batch stopped early; report indexed/checked plus the request size.
    logger.info(
        "Batch inspection complete for %s: %d/%d indexed (%d requested)",
        site_url, indexed, len(results), total,
    )

    return results
