"""Daily SEO Optimizer — Main entry point with parallel site processing."""

from __future__ import annotations

import signal
import sys
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import date, timedelta

from src.analysis.kpi_calculator import check_index_rate, check_kpi_thresholds
from src.analysis.keyword_tracker import track_keywords
from src.analysis.trend_analyzer import compare_periods, detect_drops
from src.api.rate_limiter import QuotaTracker, RateLimiter
from src.api.search_analytics import fetch_for_period
from src.api.sitemaps import check_and_resubmit
from src.api.url_inspection import batch_inspect
from src.auth.gsc_auth import authenticate
from src.config.settings import Settings, SiteConfig, load_settings
from src.logging_setup.logger import cleanup_old_logs, setup_logger
from src.models.data_models import DailyReport, SiteReport
from src.reporting.email_sender import send_email
from src.reporting.report_builder import build_alert_html, build_daily_report_html

logger = setup_logger()


def process_site(
    site: SiteConfig,
    settings: Settings,
    webmasters_service,
    searchconsole_service,
    rate_limiter: RateLimiter,
    quota_tracker: QuotaTracker,
) -> SiteReport:
    """Process a single site: analytics, analysis, sitemaps, inspection.

    Args:
        site: Site configuration.
        settings: Global settings.
        webmasters_service: Webmasters v3 API service.
        searchconsole_service: Search Console v1 API service.
        rate_limiter: Shared rate limiter.
        quota_tracker: Shared URL Inspection quota tracker.

    Returns:
        SiteReport with all gathered data.
    """
    today = date.today()

    report = SiteReport(
        site_name=site.display_name,
        property_url=site.property_url,
        report_date=today,
    )

    # Holds the Search Analytics results for this run; stays empty if the
    # fetch below fails so keyword tracking can degrade gracefully.
    current_data: dict = {}

    # --- Search Analytics ---
    try:
        logger.info("[%s] Fetching Search Analytics...", site.display_name)

        # Latest complete day of data (Search Console data typically lags by 2-3 days)
        end_date = today - timedelta(days=2)
        start_date = end_date

        current_data = fetch_for_period(
            webmasters_service, site.property_url,
            start_date, end_date, rate_limiter,
        )
        report.total_metrics = current_data["total"]
        report.top_queries = current_data["top_queries"]
        report.top_pages = current_data["top_pages"]

        # --- Trend Analysis ---
        logger.info("[%s] Analyzing trends...", site.display_name)

        # Previous day
        prev_day_start = start_date - timedelta(days=1)
        prev_day_data = fetch_for_period(
            webmasters_service, site.property_url,
            prev_day_start, prev_day_start, rate_limiter,
        )
        trend_day = compare_periods(
            current_data["total"], prev_day_data["total"], "前日比",
        )

        # 7-day comparison
        week_end = end_date
        week_start = week_end - timedelta(days=6)
        prev_week_end = week_start - timedelta(days=1)
        prev_week_start = prev_week_end - timedelta(days=6)

        current_week = fetch_for_period(
            webmasters_service, site.property_url,
            week_start, week_end, rate_limiter,
        )
        prev_week = fetch_for_period(
            webmasters_service, site.property_url,
            prev_week_start, prev_week_end, rate_limiter,
        )
        trend_week = compare_periods(
            current_week["total"], prev_week["total"], "7日比",
        )

        # 28-day comparison
        month_end = end_date
        month_start = month_end - timedelta(days=27)
        prev_month_end = month_start - timedelta(days=1)
        prev_month_start = prev_month_end - timedelta(days=27)

        current_month = fetch_for_period(
            webmasters_service, site.property_url,
            month_start, month_end, rate_limiter,
        )
        prev_month = fetch_for_period(
            webmasters_service, site.property_url,
            prev_month_start, prev_month_end, rate_limiter,
        )
        trend_month = compare_periods(
            current_month["total"], prev_month["total"], "28日比",
        )

        report.trends = [trend_day, trend_week, trend_month]

        # Detect drops
        drop_alerts = detect_drops(report.trends, site.display_name)
        report.errors.extend(drop_alerts)

    except Exception as e:
        error_msg = f"Search Analytics error: {e}"
        logger.error("[%s] %s", site.display_name, error_msg)
        report.errors.append(error_msg)

    # --- KPI Check ---
    try:
        report.kpi_alerts = check_kpi_thresholds(
            report.total_metrics, settings.kpi_thresholds, site.display_name,
        )
    except Exception as e:
        logger.error("[%s] KPI check error: %s", site.display_name, e)

    # --- Keyword Tracking ---
    try:
        if settings.tracked_keywords:
            logger.info("[%s] Tracking keywords...", site.display_name)
            report.keyword_positions = track_keywords(
                current_data.get("rows_by_query", []),
                settings.tracked_keywords,
                site.property_url,
                settings.keyword_alert_threshold,
            )
    except Exception as e:
        error_msg = f"Keyword tracking error: {e}"
        logger.error("[%s] %s", site.display_name, error_msg)
        report.errors.append(error_msg)

    # --- Sitemaps ---
    try:
        logger.info("[%s] Checking sitemaps...", site.display_name)
        report.sitemap_statuses = check_and_resubmit(
            webmasters_service,
            site.property_url,
            site.sitemap_urls,
            settings.max_sitemap_resubmit_retries,
            rate_limiter,
        )
    except Exception as e:
        error_msg = f"Sitemap check error: {e}"
        logger.error("[%s] %s", site.display_name, error_msg)
        report.errors.append(error_msg)

    # --- URL Inspection ---
    try:
        # Collect page URLs from analytics to inspect
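        # Inspections draw from the shared daily QuotaTracker budget, so only a
        # capped sample of top pages is checked each run.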
        page_urls = [p.page for p in report.top_pages]
        sample_size = min(settings.url_inspection_sample_size, len(page_urls))
        urls_to_inspect = page_urls[:sample_size]

        if urls_to_inspect:
            logger.info(
                "[%s] Inspecting %d URLs...", site.display_name, len(urls_to_inspect),
            )
            inspection_results = batch_inspect(
                searchconsole_service,
                site.property_url,
                urls_to_inspect,
                quota_tracker,
                rate_limiter,
            )

            report.index_checked_count = len(inspection_results)
            report.index_ok_count = sum(1 for r in inspection_results if r.is_indexed)
            report.unindexed_pages = [r for r in inspection_results if not r.is_indexed]

            # Check index rate
            index_alert = check_index_rate(
                report.index_checked_count,
                report.index_ok_count,
                settings.kpi_thresholds,
                site.display_name,
            )
            if index_alert:
                report.kpi_alerts.append(index_alert)

    except Exception as e:
        error_msg = f"URL Inspection error: {e}"
        logger.error("[%s] %s", site.display_name, error_msg)
        report.errors.append(error_msg)

    logger.info("[%s] Site processing complete", site.display_name)
    return report


def run() -> None:
    """Main execution: process all sites in parallel, send reports."""
    start_time = time.time()
    logger.info("=" * 60)
    logger.info("Daily SEO Optimizer started")
    logger.info("=" * 60)

    # Cleanup old logs
    deleted = cleanup_old_logs()
    if deleted:
        logger.info("Cleaned up %d old log files", deleted)

    # Load settings
    settings = load_settings()
    logger.info("Loaded config: %d sites, %d tracked keywords",
                len(settings.sites), len(settings.tracked_keywords))

    # Authenticate
    logger.info("Authenticating with Google APIs...")
    webmasters, searchconsole, _credentials = authenticate(
        settings.service_account_key_path,
    )

    # Shared resources
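    # 1,200 queries/min and 2,000 inspections/day are intended to mirror the
    # Search Console API per-site and URL Inspection per-property quotas; both
    # are shared across all sites here, so the effective budget is conservative.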
    rate_limiter = RateLimiter(max_qpm=1200)
    quota_tracker = QuotaTracker(daily_limit=2000)

    # Timeout guard (signal.alarm only accepts a whole number of seconds)
    timeout_seconds = int(settings.execution_timeout_hours * 3600)

    def timeout_handler(signum, frame):
        logger.error("Execution timeout (%d hours) reached!", settings.execution_timeout_hours)
        sys.exit(1)

    # SIGALRM is Unix-only; skip on Windows
    if hasattr(signal, "SIGALRM"):
        signal.signal(signal.SIGALRM, timeout_handler)
        signal.alarm(timeout_seconds)

    # Process sites in parallel
    site_reports: list[SiteReport] = []
    alert_messages: dict[str, list[str]] = {}

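    # A small worker pool overlaps the network-bound API calls for each site;
    # the shared RateLimiter and QuotaTracker keep the combined request rate
    # within the budget above.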
    with ThreadPoolExecutor(max_workers=3) as executor:
        futures = {
            executor.submit(
                process_site, site, settings,
                webmasters, searchconsole,
                rate_limiter, quota_tracker,
            ): site
            for site in settings.sites
        }

        for future in as_completed(futures):
            site = futures[future]
            try:
                report = future.result(timeout=timeout_seconds)
                site_reports.append(report)

                # Collect alert messages
                site_alert_msgs = []
                for alert in report.kpi_alerts:
                    site_alert_msgs.append(alert.message)
                for kp in report.keyword_positions:
                    if kp.is_alert:
                        site_alert_msgs.append(
                            f"キーワード順位変動: '{kp.keyword}' "
                            f"{kp.previous_position:.1f} → {kp.current_position:.1f} "
                            f"({kp.change:+.1f})"
                        )
                site_alert_msgs.extend(report.errors)

                if site_alert_msgs:
                    alert_messages[site.display_name] = site_alert_msgs

            except Exception as e:
                logger.error("Failed to process %s: %s", site.display_name, e)
                error_report = SiteReport(
                    site_name=site.display_name,
                    property_url=site.property_url,
                    report_date=date.today(),
                    errors=[f"Processing failed: {e}"],
                )
                site_reports.append(error_report)
                alert_messages[site.display_name] = [f"Processing failed: {e}"]

    # Build daily report
    elapsed = time.time() - start_time
    daily_report = DailyReport(
        report_date=date.today(),
        site_reports=site_reports,
        has_alerts=bool(alert_messages),
        execution_time_seconds=elapsed,
    )

    # Send immediate alert if needed
    if alert_messages:
        logger.warning("Alerts detected — sending immediate alert email")
        alert_html = build_alert_html(alert_messages)
        send_email(
            settings.email,
            f"[SEO Alert] 異常検知 - {date.today()}",
            alert_html,
        )

    # Send daily report
    logger.info("Building and sending daily report...")
    report_html = build_daily_report_html(daily_report)
    send_email(
        settings.email,
        f"[Daily SEO Report] {date.today()}",
        report_html,
    )

    logger.info("=" * 60)
    logger.info("Daily SEO Optimizer completed in %.1f seconds", elapsed)
    logger.info("=" * 60)


if __name__ == "__main__":
    run()
