"""Add site-level crawl-plan config — daily window + daily request cap.

Revision ID: 0005_site_crawl_plan
Revises: 0004_add_local_dept_region
Create Date: 2026-04-24

Anti-bot rules on policy sites trigger when scrapers run outside expected
business windows or hammer the host. We let operators pin a daily TIME
window (`crawl_window_start`/`crawl_window_end`) and a max daily request
cap per crawl_site. The scheduler / fetcher consult these values; values
left NULL mean "no constraint" so existing rows keep current behavior on
upgrade.
"""
from alembic import op
import sqlalchemy as sa

revision = "0005_site_crawl_plan"
down_revision = "0004_add_local_dept_region"
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Add the crawl-plan columns to ``crawl_site``.

    Every column is either nullable or carries a server default, so
    existing rows stay valid with no data backfill.
    """
    plan_columns = (
        sa.Column("crawl_window_start", sa.Time(), nullable=True),
        sa.Column("crawl_window_end", sa.Time(), nullable=True),
        sa.Column("daily_max_requests", sa.Integer(), nullable=True),
        # NOT NULL is safe here because the server default backfills
        # existing rows at ALTER time.
        sa.Column("weekend_enabled", sa.Boolean(), server_default=sa.text("true"), nullable=False),
        sa.Column("backoff_min_sec", sa.Integer(), nullable=True),
        sa.Column("backoff_max_sec", sa.Integer(), nullable=True),
    )
    for column in plan_columns:
        op.add_column("crawl_site", column)


def downgrade() -> None:
    """Drop the crawl-plan columns, in reverse order of their creation."""
    op.drop_column("crawl_site", "backoff_max_sec")
    op.drop_column("crawl_site", "backoff_min_sec")
    op.drop_column("crawl_site", "weekend_enabled")
    op.drop_column("crawl_site", "daily_max_requests")
    op.drop_column("crawl_site", "crawl_window_end")
    op.drop_column("crawl_site", "crawl_window_start")
