from __future__ import annotations


def test_rag_export_after_crawl_defaults_disabled(monkeypatch):
    """RAG export after crawl must default to off when the env var is absent."""
    from govcrawler.settings import Settings

    # Make sure the environment cannot flip the default on for this test.
    monkeypatch.delenv("RAG_EXPORT_AFTER_CRAWL_ENABLED", raising=False)

    cfg = Settings(_env_file=None, db_url="sqlite:///settings.db")

    assert cfg.rag_export_after_crawl_enabled is False


def test_task_queue_skips_rag_export_after_crawl_when_disabled(monkeypatch):
    """No export task may be scheduled when the after-crawl feature flag is off."""
    from govcrawler import runtime_config
    from govcrawler.api import task_queue

    captured = []

    def record_task(*args, **kwargs):
        # Stand-in for asyncio.create_task: just remember what was scheduled.
        captured.append((args, kwargs))

    # Reset the cached flag and force settings to report the feature disabled.
    monkeypatch.setattr(runtime_config, "_rag_export_after_crawl_enabled", None)
    fake_settings = type("Settings", (), {"rag_export_after_crawl_enabled": False})()
    monkeypatch.setattr(runtime_config, "get_settings", lambda: fake_settings)
    monkeypatch.setattr(task_queue.asyncio, "create_task", record_task)

    queue = task_queue.TaskQueue()
    job = task_queue.JobInfo(
        job_id="job-1",
        site_code="demo",
        target_code="demo__news",
        source="manual",
        status="done",
    )

    queue._submit_rag_export_after_crawl(job)

    # With the flag off, nothing should have reached asyncio.create_task.
    assert not captured
