"""Backfill gov.cn zcwjk article standard metadata from stored list payloads.

The zcwjk list API already carries government-document fields such as
索引号、发文机关、主题分类、发文字号、发布日期、成文日期. Older ingested rows
kept the raw list item under article.metadata_json["raw"], but did not map all
of those fields onto the normalized article columns.

Idempotent: rerunning the script produces no changes once rows match the raw
payload. By default it corrects mismatched legacy values. Use --only-missing if
you only want to fill NULL/empty fields.

Run inside the api container:

    docker exec docker-api-1 python /app/scripts/backfill_gov_cn_zcwjk_metadata.py --dry-run
    docker exec docker-api-1 python /app/scripts/backfill_gov_cn_zcwjk_metadata.py
"""
from __future__ import annotations

import argparse
import sys
from collections import Counter
from dataclasses import dataclass
from datetime import date, datetime
from pathlib import Path
from typing import Any

sys.path.insert(0, str(Path(__file__).resolve().parent.parent))

from sqlalchemy import select  # noqa: E402

from govcrawler.adapters.gov_cn_policy import (  # noqa: E402
    _normalize_publish_time,
    _split_policy_category,
)
from govcrawler.db import get_sessionmaker  # noqa: E402
from govcrawler.models import Article, CrawlTarget  # noqa: E402


@dataclass(frozen=True)
class BackfillStats:
    """Immutable summary of one backfill run, returned by run_backfill()."""

    # Total number of articles examined for the target.
    scanned: int
    # Articles where at least one normalized column was updated.
    changed: int
    # Articles lacking a usable dict under metadata_json["raw"].
    skipped_no_raw: int
    # Per-column tallies of how often each field was updated.
    field_counts: Counter[str]


def _coerce_raw(meta: Any) -> dict[str, Any] | None:
    if not isinstance(meta, dict):
        return None
    raw = meta.get("raw")
    return raw if isinstance(raw, dict) else None


def _values_from_raw(raw: dict[str, Any]) -> dict[str, Any]:
    """Translate a raw zcwjk list item into normalized Article column values.

    The publish timestamp prefers ``pubtime``/``pubtimeStr`` and falls back to
    ``ptime``; ``publish_date`` prefers the document date (``ptime``) over the
    publish timestamp. Empty strings and missing keys map to ``None``.
    """
    publish_dt = _normalize_publish_time(raw.get("pubtime") or raw.get("pubtimeStr"))
    if publish_dt is None:
        publish_dt = _normalize_publish_time(raw.get("ptime"))
    written_dt = _normalize_publish_time(raw.get("ptime"))
    top_cat, sub_cat, open_cat = _split_policy_category(raw.get("childtype"))
    issuing_org = raw.get("puborg") or None
    raw_id = raw.get("id")

    # Best-effort date: document date first, then publish timestamp.
    if written_dt is not None:
        best_date = written_dt.date()
    elif publish_dt is not None:
        best_date = publish_dt.date()
    else:
        best_date = None

    return {
        "native_post_id": str(raw_id) if raw_id else None,
        "index_no": raw.get("index") or None,
        "publisher": issuing_org,
        "source_raw": issuing_org,
        "doc_no": raw.get("pcode") or None,
        "publish_time": publish_dt,
        "publish_date": best_date,
        "topic_words": raw.get("subjectword") or None,
        "open_category": open_cat,
        "content_category": top_cat,
        "content_subcategory": sub_cat,
    }


def _is_empty(v: Any) -> bool:
    return v is None or v == ""


def _same_value(left: Any, right: Any) -> bool:
    if isinstance(left, datetime) and isinstance(right, datetime):
        return left.replace(microsecond=0) == right.replace(microsecond=0)
    if isinstance(left, date) and isinstance(right, date):
        return left == right
    return left == right


def _apply_values(article: Article, values: dict[str, Any], *, only_missing: bool) -> list[str]:
    """Copy non-empty values onto the article; return the names of fields changed.

    Empty candidates (None/"") never overwrite anything. With only_missing=True,
    fields already holding a non-empty value are left untouched. Values equal to
    the current column (per _same_value) are skipped to keep reruns idempotent.
    """
    touched: list[str] = []
    for name, candidate in values.items():
        if _is_empty(candidate):
            continue
        current = getattr(article, name)
        if only_missing and not _is_empty(current):
            continue
        if not _same_value(current, candidate):
            setattr(article, name, candidate)
            touched.append(name)
    return touched


def run_backfill(
    *,
    target_code: str,
    dry_run: bool,
    only_missing: bool,
    limit: int | None,
) -> BackfillStats:
    """Backfill normalized columns for every article under *target_code*.

    Iterates articles in id order, maps each stored raw list payload onto the
    normalized columns, and commits once at the end. With dry_run the session
    is rolled back instead, discarding all ORM mutations. Raises SystemExit
    when the crawl target does not exist.
    """
    session_factory = get_sessionmaker()
    counts: Counter[str] = Counter()
    seen = 0
    updated = 0
    missing_raw = 0

    with session_factory() as session:
        target = session.scalar(
            select(CrawlTarget).where(CrawlTarget.target_code == target_code)
        )
        if target is None:
            raise SystemExit(f"target not found: {target_code}")

        query = (
            select(Article)
            .where(Article.target_id == target.id)
            .order_by(Article.id.asc())
        )
        if limit is not None:
            query = query.limit(limit)

        for row in session.scalars(query):
            seen += 1
            payload = _coerce_raw(row.metadata_json)
            if payload is None:
                missing_raw += 1
                continue
            touched = _apply_values(
                row,
                _values_from_raw(payload),
                only_missing=only_missing,
            )
            if touched:
                updated += 1
                counts.update(touched)

        # Dry runs discard every pending ORM mutation instead of persisting.
        if dry_run:
            session.rollback()
        else:
            session.commit()

    return BackfillStats(
        scanned=seen,
        changed=updated,
        skipped_no_raw=missing_raw,
        field_counts=counts,
    )


def _build_parser() -> argparse.ArgumentParser:
    p = argparse.ArgumentParser(description=__doc__)
    p.add_argument("--target-code", default="gov_cn_zcwjk_all")
    p.add_argument("--dry-run", action="store_true")
    p.add_argument("--only-missing", action="store_true")
    p.add_argument("--limit", type=int, default=None)
    return p


def main(argv: list[str] | None = None) -> int:
    """CLI entry point: parse args, run the backfill, print a one-line summary."""
    opts = _build_parser().parse_args(argv)
    stats = run_backfill(
        target_code=opts.target_code,
        dry_run=opts.dry_run,
        only_missing=opts.only_missing,
        limit=opts.limit,
    )
    label = "dry_run" if opts.dry_run else "committed"
    print(
        f"mode={label} target={opts.target_code} scanned={stats.scanned} "
        f"changed={stats.changed} skipped_no_raw={stats.skipped_no_raw}"
    )
    if stats.field_counts:
        pairs = [f"{k}:{v}" for k, v in sorted(stats.field_counts.items())]
        print("field_counts=" + ",".join(pairs))
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
