"""Minimal CRUD over the v2 three-layer chain (site → site_department → crawl_target).

Each repo is a plain module of functions taking an SQLAlchemy `Session` so
callers (yaml-sync, adapters, admin API) stay in charge of transactions.

We intentionally stop at the site/dept/target triple for Phase-A; article
writes go through the pipeline directly (see `govcrawler.storage.repo`)
because they need the contract→model projection logic.
"""

# Re-export the per-layer repo modules so callers can reach them via this
# package (e.g. `from govcrawler.repositories import sites`), one module
# per chain layer: site -> site_department -> crawl_target.
from govcrawler.repositories import depts
from govcrawler.repositories import sites
from govcrawler.repositories import targets

# These three modules are the package's entire public surface.
__all__ = ["sites", "depts", "targets"]
