"""Task-queue admin endpoints — list / cancel crawl jobs.

Per-site concurrency is enforced by `task_queue.TaskQueue`. These endpoints
expose the in-process state for the UI (jobs page).
"""
from __future__ import annotations

from typing import Any

from fastapi import HTTPException, Query

from govcrawler.api.task_queue import get_queue

from ._common import router


@router.get("/api/jobs")
def list_jobs(
    site: str | None = Query(None, description="filter by site_code"),
    status: str | None = Query(None, description="queued|running|done|failed|cancelled"),
    include_history: bool = Query(True),
    limit: int = Query(200, ge=1, le=1000),
) -> dict[str, Any]:
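    """Return current jobs, optionally including recent history.

    `site` and `status` filter the result; `limit` caps the number of jobs
    returned. Illustrative response shape below; the fields inside
    "queue_summary" and each job come from `TaskQueue.queue_summary()` and
    each job's `to_dict()`, so treat those inner names as assumptions::

        {
            "count": 1,
            "queue_summary": {...},
            "jobs": [{"id": "...", "status": "running", ...}]
        }
    """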
    q = get_queue()
    jobs = [
        j.to_dict()
        for j in q.list_jobs(
            site=site,
            status=status,
            include_history=include_history,
            limit=limit,
        )
    ]
    return {
        "count": len(jobs),
        "queue_summary": q.queue_summary(),
        "jobs": jobs,
    }


@router.post("/api/jobs/{job_id}/cancel")
async def cancel_job(job_id: str) -> dict[str, Any]:
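    """Cancel a job by id.

    Returns the queue's cancel result on success. An unknown job id yields
    404; any other failure reason from `TaskQueue.cancel` (for example a
    job that already finished, which is an assumption about its reasons)
    yields 409 with the reason as the detail.
    """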
    r = await get_queue().cancel(job_id)
    if not r.get("ok"):
        raise HTTPException(404 if r.get("reason") == "not_found" else 409, r.get("reason", "cannot cancel"))
    return r
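

# A minimal sketch of exercising these endpoints from the command line; the
# host, port, and placeholders in angle brackets are assumptions, not values
# defined in this repo:
#
#   curl 'http://localhost:8000/api/jobs?site=<site_code>&status=running'
#   curl -X POST 'http://localhost:8000/api/jobs/<job_id>/cancel'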
