"""Sites + local-departments + site CRUD/toggle/delete."""
from __future__ import annotations

from typing import Any

from fastapi import Body, Depends, HTTPException, Query
from sqlalchemy import String, func, or_, select
from sqlalchemy.orm import Session

from govcrawler.models import Article, CrawlJob, CrawlSite, LocalDepartment
from govcrawler.repositories import sites as sites_repo
from govcrawler.repositories import targets as targets_repo

from ._common import (
    _serialize_site,
    _session,
    _validate_site_payload,
    router,
)


@router.get("/api/sites")
def sites(s: Session = Depends(_session)) -> dict[str, Any]:
    """Return every crawl_site with its crawl_targets nested."""
    # Walk finished jobs newest-first; per target_code remember the most
    # recent successful job and the most recent successful *forced* job.
    job_stats: dict[str, dict[str, Any]] = {}
    finished_jobs = s.execute(
        select(CrawlJob)
        .where(CrawlJob.status == "done")
        .order_by(CrawlJob.finished_at.desc(), CrawlJob.enqueued_at.desc())
    ).scalars()
    for job in finished_jobs:
        payload = job.result_json if isinstance(job.result_json, dict) else {}
        if payload.get("status") not in (None, "ok"):
            continue  # finished, but with a non-ok result — ignore
        stats = job_stats.setdefault(job.target_code, {})
        stats.setdefault("last_crawl_job", job)
        if job.force:
            stats.setdefault("last_full_crawl_job", job)

    site_rows = s.execute(select(CrawlSite).order_by(CrawlSite.id)).scalars()
    return {
        "sites": [
            _serialize_site(
                site,
                targets_repo.list_for_site(s, site.id, enabled_only=False),
                target_run_stats=job_stats,
            )
            for site in site_rows
        ]
    }


def _local_dept_payload(row: LocalDepartment) -> dict[str, Any]:
    return {
        "dept_id": row.dept_id,
        "dept_name": row.dept_name,
        "short_name": row.short_name,
        "full_name": row.full_name,
        "parent_dept_id": row.parent_dept_id,
        "dept_level": row.dept_level,
        "region": row.region,
    }


@router.get("/api/local-departments")
def local_departments(
    q: str | None = Query(None, description="substring of dept_name / full_name"),
    region: str | None = Query(None, description="filter by region label"),
    limit: int = Query(200, ge=1, le=5000),
    s: Session = Depends(_session),
) -> dict[str, Any]:
    """List local departments, optionally filtered by substring and region."""
    conditions = []
    if q:
        pattern = f"%{q}%"
        # Match name, full name, or the numeric id rendered as text.
        conditions.append(
            or_(
                LocalDepartment.dept_name.ilike(pattern),
                LocalDepartment.full_name.ilike(pattern),
                func.cast(LocalDepartment.dept_id, String).ilike(pattern),
            )
        )
    if region:
        conditions.append(LocalDepartment.region == region)
    stmt = (
        select(LocalDepartment)
        .where(*conditions)
        .order_by(LocalDepartment.dept_id)
        .limit(limit)
    )
    items = [_local_dept_payload(r) for r in s.execute(stmt).scalars()]
    return {"count": len(items), "items": items}


@router.post("/api/local-departments")
def create_local_department(
    payload: dict[str, Any] = Body(...),
    s: Session = Depends(_session),
) -> dict[str, Any]:
    """Create a LocalDepartment row.

    Raises 400 on invalid input and 409 when dept_id already exists.
    """
    dept_id = payload.get("dept_id")
    # Reject bool explicitly: `True` is an int subclass and would otherwise
    # slip through as dept_id=1.
    if not isinstance(dept_id, int) or isinstance(dept_id, bool) or dept_id <= 0:
        raise HTTPException(400, "dept_id must be positive int")
    if s.get(LocalDepartment, dept_id) is not None:
        raise HTTPException(409, f"dept_id exists: {dept_id}")

    def _clean(key: str) -> Any:
        # Normalize empty / whitespace-only strings to NULL.
        return (payload.get(key) or "").strip() or None

    def _opt_int(key: str) -> int | None:
        # Accept int/None as-is; coerce anything else (mirrors the PUT
        # endpoint) and return 400 instead of a 500 DB error on garbage.
        v = payload.get(key)
        if v is None or isinstance(v, int):
            return v
        try:
            return int(v)
        except (TypeError, ValueError):
            raise HTTPException(400, f"{key} must be an int or null") from None

    row = LocalDepartment(
        dept_id=dept_id,
        dept_name=_clean("dept_name"),
        short_name=_clean("short_name"),
        full_name=_clean("full_name"),
        parent_dept_id=_opt_int("parent_dept_id"),
        dept_level=_opt_int("dept_level"),
        region=_clean("region"),
    )
    s.add(row)
    s.commit()
    s.refresh(row)
    return {"department": _local_dept_payload(row)}


@router.put("/api/local-departments/{dept_id}")
def update_local_department(
    dept_id: int,
    payload: dict[str, Any] = Body(...),
    s: Session = Depends(_session),
) -> dict[str, Any]:
    """Partially update a LocalDepartment; only keys present in payload change.

    Raises 404 when the row is missing and 400 when an int field cannot be
    parsed (previously a bare ``int(v)`` let ValueError/TypeError escape as
    an unhandled 500).
    """
    row = s.get(LocalDepartment, dept_id)
    if row is None:
        raise HTTPException(404, f"dept_id not found: {dept_id}")
    # String-ish columns: blank / whitespace-only values collapse to NULL;
    # non-string values pass through unchanged.
    for key in ("dept_name", "short_name", "full_name", "region"):
        if key in payload:
            v = payload.get(key)
            if isinstance(v, str):
                v = v.strip() or None
            setattr(row, key, v)
    # Int columns: accept int / None as-is, coerce anything else.
    for key in ("parent_dept_id", "dept_level"):
        if key in payload:
            v = payload.get(key)
            if v is not None and not isinstance(v, int):
                try:
                    v = int(v)
                except (TypeError, ValueError):
                    raise HTTPException(
                        400, f"{key} must be an int or null"
                    ) from None
            setattr(row, key, v)
    s.commit()
    s.refresh(row)
    return {"department": _local_dept_payload(row)}


@router.delete("/api/local-departments/{dept_id}")
def delete_local_department(
    dept_id: int,
    s: Session = Depends(_session),
) -> dict[str, Any]:
    """Hard-delete a LocalDepartment row (404 if it does not exist)."""
    row = s.get(LocalDepartment, dept_id)
    if row is None:
        raise HTTPException(404, f"dept_id not found: {dept_id}")
    # Safe to hard-delete: crawl_target.dept_id and site_department.local_dept_id
    # are ondelete=SET NULL, so historical bindings simply become unbound.
    s.delete(row)
    s.commit()
    return {"deleted": True, "dept_id": dept_id}


@router.post("/api/sites")
def create_site(
    payload: dict[str, Any] = Body(...),
    s: Session = Depends(_session),
) -> dict[str, Any]:
    """Create a crawl_site from a JSON payload; 409 when site_code is taken."""
    fields = _validate_site_payload(payload, partial=False)
    site_code = fields.pop("site_code")
    # _validate_site_payload(partial=False) is expected to supply site_code;
    # guard explicitly rather than with `assert`, which is stripped under
    # `python -O` and would let None reach the repository layer.
    if not site_code:
        raise HTTPException(400, "site_code is required")
    if sites_repo.get_by_code(s, site_code) is not None:
        raise HTTPException(409, f"site already exists: {site_code}")
    # UI-created rows are tagged so config.sync.sync_dir leaves them alone.
    row = sites_repo.upsert_by_code(s, site_code, managed_by="ui", **fields)
    s.commit()
    s.refresh(row)
    return {"site": _serialize_site(row, [])}


@router.put("/api/sites/{site_code}")
def update_site(
    site_code: str,
    payload: dict[str, Any] = Body(...),
    s: Session = Depends(_session),
) -> dict[str, Any]:
    """Patch an existing crawl_site; site_code itself cannot be renamed."""
    if "site_code" in payload and payload.get("site_code") != site_code:
        raise HTTPException(400, "site_code in path/body must match")
    row = sites_repo.get_by_code(s, site_code)
    if row is None:
        raise HTTPException(404, f"site not found: {site_code}")
    updates = _validate_site_payload(payload, partial=True)
    updates.pop("site_code", None)  # never rename via PUT
    for field, value in updates.items():
        setattr(row, field, value)
    s.commit()
    s.refresh(row)
    tgts = targets_repo.list_for_site(s, row.id, enabled_only=False)
    return {"site": _serialize_site(row, tgts)}


@router.post("/api/sites/{site_code}/toggle")
def toggle_site(
    site_code: str, enabled: bool = Query(...),
    s: Session = Depends(_session),
) -> dict[str, Any]:
    """Flip a site's enabled flag; 404 when site_code is unknown."""
    updated = sites_repo.set_enabled(s, site_code, enabled)
    if updated is None:
        raise HTTPException(404, f"site not found: {site_code}")
    s.commit()
    return {"site_code": site_code, "enabled": enabled}


@router.delete("/api/sites/{site_code}")
def delete_site(
    site_code: str, s: Session = Depends(_session),
) -> dict[str, Any]:
    """Hard-delete a site only if nothing crawled yet.

    Deletion is a foot-gun. Article.site_id uses ondelete=RESTRICT on purpose:
    if we ever let the operator nuke a site that still has harvested articles,
    we'd either orphan rows (violating the FK) or silently lose historical
    content. The UI can hide the row with `enabled=False`; hard-delete is only
    for the "created the wrong site_code, nothing crawled yet" case.
    """
    row = sites_repo.get_by_code(s, site_code)
    if row is None:
        raise HTTPException(404, f"site not found: {site_code}")
    count_stmt = (
        select(func.count()).select_from(Article).where(Article.site_id == row.id)
    )
    n_articles = s.scalar(count_stmt) or 0
    if n_articles:
        raise HTTPException(
            409,
            f"site {site_code!r} has {n_articles} article(s); disable it "
            "instead of deleting, or purge articles first.",
        )
    # No articles → CASCADE wipes site_department + crawl_target rows cleanly.
    s.delete(row)
    s.commit()
    return {"deleted": True, "site_code": site_code}
