package com.gzzm.lobster.llm;

import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;

/**
 * LobsterLlmAdapter —— 大龙虾自建的 LLM 适配器接口 /
 * Adapter interface wrapping LangChain4j's ChatLanguageModel.
 *
 * <p>内部可以包装 LangChain4j OpenAiChatModel / OllamaChatModel，也可以
 * 为特殊协议做直接 HTTP 实现。由 LLM 管理层负责装配、路由、降级、审计。
 * Internally wraps LangChain4j models or implements direct HTTP; routing
 * and governance is done by the LLM management layer.
 */
public interface LobsterLlmAdapter {

    /** 绑定的 ModelProfile / Returns the model profile this adapter serves. */
    ModelProfile profile();

    /** 同步调用 / Synchronous invocation. */
    LlmResponse chat(List<LobsterMessage> messages, List<ToolSpec> tools);

    /** 流式调用（旧接口，保持兼容） / Streaming invocation (legacy). */
    void chatStream(List<LobsterMessage> messages, List<ToolSpec> tools, StreamingResponseHandler handler);

    /**
     * 带会话句柄的流式调用 / Streaming invocation returning a {@link StreamingSession}.
     *
     * <p>新接口：上层可通过返回的 {@link StreamingSession} 主动 {@code cancel(reason)}
     * 来强制关闭底层 HTTP 连接，而不是依赖 handler 侧的 {@code isCancelled()} 轮询。
     *
     * <p>默认实现：把 {@link CancelReason} 回桥到旧 handler 的 {@code isCancelled()}，
     * 保证老 adapter 不需要改动就能向上提供 Session 语义。原生实现可以重写此方法，
     * 直接持有 {@code HttpURLConnection} 并在 cancel 时 {@code disconnect()}。
     *
     * <p>New-style streaming: cancel(reason) tears down the HTTP call.
     * Default impl bridges to legacy chatStream + handler.isCancelled().
     */
    default StreamingSession startChatStream(List<LobsterMessage> messages, List<ToolSpec> tools,
                                             StreamingResponseHandler handler) {
        final AtomicReference<CancelReason> cancelRef = new AtomicReference<>();
        final StreamingResponseHandler bridged = new StreamingResponseHandler() {
            @Override public void onDelta(String delta) { handler.onDelta(delta); }
            @Override public void onReasoningDelta(String delta) { handler.onReasoningDelta(delta); }
            @Override public void onToolCall(ToolCall toolCall) { handler.onToolCall(toolCall); }
            @Override public void onWriteFileContentDelta(String toolCallId, int toolIndex, String contentDelta) {
                handler.onWriteFileContentDelta(toolCallId, toolIndex, contentDelta);
            }
            @Override public void onComplete(LlmResponse response) { handler.onComplete(response); }
            @Override public void onError(Throwable error) { handler.onError(error); }
            @Override public boolean isCancelled() {
                return cancelRef.get() != null || handler.isCancelled();
            }
        };
        final StreamingSession session = new StreamingSession() {
            @Override public void cancel(CancelReason reason) {
                cancelRef.compareAndSet(null, reason == null ? CancelReason.USER : reason);
            }
            @Override public boolean isCancelled() { return cancelRef.get() != null; }
            @Override public CancelReason cancelReason() { return cancelRef.get(); }
        };
        // 同步执行：上层如需并行取消须把本调用丢到 executor。AgentRuntime 已这么做。
        chatStream(messages, tools, bridged);
        return session;
    }
}
