from __future__ import annotations

from typing import Any
from urllib.parse import urlparse

from fastapi import Body, Depends, HTTPException, Query
from sqlalchemy import case, desc, func, select
from sqlalchemy.orm import Session

from govcrawler.models import Article, ArticleRagPushLog, CrawlSite, CrawlTarget
from govcrawler.runtime_config import (
    rag_export_after_crawl_config,
    set_rag_export_after_crawl_enabled,
)
from govcrawler.settings import get_settings

from ._common import _session, router


def _rag_server_payload() -> dict[str, Any]:
    """Describe the configured RAG ingest endpoint as structured fields."""
    cfg = get_settings()
    url = cfg.rag_gov_public_kb_ingest_url or ""
    parsed = urlparse(url)
    return {
        "url": url,
        "profile": "gov_public_kb",
        "scheme": parsed.scheme or "",
        "host": parsed.hostname or "",
        "port": parsed.port,
        "path": parsed.path or "",
    }
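
# Illustrative result only (the URL below is a made-up example; the real value
# comes from the rag_gov_public_kb_ingest_url setting):
#   {"url": "http://rag.example:9000/api/ingest", "profile": "gov_public_kb",
#    "scheme": "http", "host": "rag.example", "port": 9000, "path": "/api/ingest"}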


def _summary(s: Session) -> dict[str, Any]:
    """Aggregate push-log stats: total pushes, failure count, mean duration."""
    row = s.execute(
        select(
            func.count(ArticleRagPushLog.id),
            func.sum(case((ArticleRagPushLog.status == "failed", 1), else_=0)),
            func.avg(ArticleRagPushLog.duration_ms),
        )
    ).first()
    total = int(row[0] or 0) if row else 0
    failed = int(row[1] or 0) if row else 0
    avg_ms = float(row[2]) if row and row[2] is not None else None
    return {
        "push_count": total,
        "failed_count": failed,
        "avg_duration_ms": avg_ms,
        "rag_server": _rag_server_payload(),
        "auto_push": rag_export_after_crawl_config(),
    }
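
# Example _summary() result (numbers invented for illustration; avg_duration_ms
# stays None until at least one row has a non-NULL duration_ms):
#   {"push_count": 42, "failed_count": 3, "avg_duration_ms": 815.2,
#    "rag_server": {...}, "auto_push": {...}}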


@router.get("/api/rag-push-logs")
def list_rag_push_logs(
    q: str | None = Query(None, description="substring of article title"),
    source: str | None = Query(None, description="manual | auto"),
    status: str | None = Query(None, description="running | completed | failed"),
    limit: int = Query(20, ge=1, le=200),
    offset: int = Query(0, ge=0),
    s: Session = Depends(_session),
) -> dict[str, Any]:
    """List RAG push logs, newest first, with optional filtering."""
    filters = []
    if q:
        filters.append(Article.title.ilike(f"%{q}%"))
    if source:
        if source not in {"manual", "auto"}:
            raise HTTPException(400, "source must be manual or auto")
        filters.append(ArticleRagPushLog.source == source)
    if status:
        if status not in {"running", "completed", "failed"}:
            raise HTTPException(400, "status must be running, completed or failed")
        filters.append(ArticleRagPushLog.status == status)
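    # At this point `filters` holds zero to three boolean clauses; for example
    # (hypothetical values) q="budget" and source="auto" produce
    # [Article.title.ilike("%budget%"), ArticleRagPushLog.source == "auto"].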

    # Count first so pagination metadata reflects the full filtered set. The
    # site/target joins are many-to-one and never filtered on, so they are
    # omitted here: they cannot change the row count.
    count_stmt = (
        select(func.count(ArticleRagPushLog.id))
        .select_from(ArticleRagPushLog)
        .join(Article, Article.id == ArticleRagPushLog.article_id)
        .where(*filters)
    )
    total = int(s.execute(count_stmt).scalar() or 0)

    stmt = (
        select(
            ArticleRagPushLog,
            Article.title.label("article_title"),
            Article.url.label("article_url"),
            CrawlSite.site_code.label("site_code"),
            CrawlTarget.target_code.label("target_code"),
        )
        .select_from(ArticleRagPushLog)
        .join(Article, Article.id == ArticleRagPushLog.article_id)
        .join(CrawlSite, CrawlSite.id == Article.site_id, isouter=True)
        .join(CrawlTarget, CrawlTarget.id == Article.target_id, isouter=True)
        .where(*filters)
        .order_by(desc(ArticleRagPushLog.started_at), desc(ArticleRagPushLog.id))
        .offset(offset)
        .limit(limit)
    )
    rows = s.execute(stmt).all()
    return {
        "count": len(rows),
        "total": total,
        "limit": limit,
        "offset": offset,
        "summary": _summary(s),
        "items": [
            {
                "id": r.ArticleRagPushLog.id,
                "article_id": r.ArticleRagPushLog.article_id,
                "article_title": r.article_title,
                "article_url": r.article_url,
                "site_code": r.site_code,
                "target_code": r.target_code,
                "source": r.ArticleRagPushLog.source,
                "status": r.ArticleRagPushLog.status,
                "file_count": r.ArticleRagPushLog.file_count,
                "duration_ms": r.ArticleRagPushLog.duration_ms,
                "started_at": r.ArticleRagPushLog.started_at.isoformat()
                if r.ArticleRagPushLog.started_at else None,
                "finished_at": r.ArticleRagPushLog.finished_at.isoformat()
                if r.ArticleRagPushLog.finished_at else None,
                "error_msg": r.ArticleRagPushLog.error_msg,
                "task_ids": r.ArticleRagPushLog.task_ids or [],
                "rag_ingest_url": r.ArticleRagPushLog.rag_ingest_url,
            }
            for r in rows
        ],
    }
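
# Example request (hypothetical query values):
#   GET /api/rag-push-logs?status=failed&q=notice&limit=50
# "count" is the size of the returned page and "total" the filtered total, so
# clients can keep paging while offset + count < total.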


@router.get("/api/rag-push-logs/summary")
def rag_push_summary(s: Session = Depends(_session)) -> dict[str, Any]:
    """Return the aggregate push statistics without the paginated item list."""
    return _summary(s)


@router.put("/api/rag-push-logs/config")
def update_rag_push_config(
    payload: dict[str, Any] = Body(...),
) -> dict[str, Any]:
    """Enable or disable automatic RAG export after each crawl."""
    # isinstance(None, bool) is False, so a missing "enabled" key is rejected
    # as well; an explicit membership check would be redundant.
    if not isinstance(payload.get("enabled"), bool):
        raise HTTPException(400, "enabled must be boolean")
    set_rag_export_after_crawl_enabled(payload["enabled"])
    return rag_export_after_crawl_config()
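
# Example call (hypothetical host/port; only a boolean "enabled" is accepted):
#   curl -X PUT http://localhost:8000/api/rag-push-logs/config \
#        -H 'Content-Type: application/json' -d '{"enabled": false}'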
