import { BaseLLM } from "./baseLLM";
import { ChatMessage } from "../index";
import { streamSse } from "./stream";



/**
 * LLM adapter for DeepSeek's OpenAI-compatible chat-completions API.
 * Streams assistant messages over server-sent events via `streamSse`.
 */
export class DeepSeek extends BaseLLM {
  readonly apiKey: string;
  readonly modelName: string;
  readonly url: string;

  /**
   * @param apiKey    Bearer token for the DeepSeek (or Azure-hosted) endpoint.
   * @param modelName Model identifier sent in the request body.
   * @param url       Full chat-completions endpoint URL.
   */
  constructor(apiKey: string, modelName: string, url: string) {
    super();
    this.apiKey = apiKey;
    this.modelName = modelName;
    this.url = url;
  }

  /** Request headers: standard Bearer auth plus `api-key` for Azure gateways. */
  protected _getHeaders(): Record<string, string> {
    return {
      "Content-Type": "application/json",
      Authorization: `Bearer ${this.apiKey}`,
      "api-key": this.apiKey ?? "", // For Azure
    };
  }

  /** Build the JSON body shared by all completion requests. */
  protected _convertArgs(messages: ChatMessage[]): {
    messages: ChatMessage[];
    model: string;
  } {
    return {
      messages,
      model: this.modelName,
    };
  }

  /**
   * POST a streaming chat-completions request and yield raw text deltas.
   *
   * @param _messages Conversation history to send.
   * @param signal    Abort signal forwarded to `fetch` so callers can cancel.
   * @yields Each non-empty `choices[0].delta.content` fragment as it arrives.
   */
  public async *_legacystreamComplete(
    _messages: ChatMessage[],
    signal: AbortSignal,
  ): AsyncGenerator<string> {
    const args = this._convertArgs(_messages);

    const response = await this.fetch(new URL(this.url), {
      method: "POST",
      headers: this._getHeaders(),
      body: JSON.stringify({
        ...args,
        stream: true,
      }),
      signal,
    });

    for await (const value of streamSse(response)) {
      // Fix: chain through `delta` too — the terminal SSE chunk (carrying only
      // `finish_reason`) has no `delta`, and the old `.delta.content` threw.
      const text = value.choices?.[0]?.delta?.content;
      if (text) {
        yield text;
      }
    }
  }

  /**
   * Stream a chat response as assistant-role `ChatMessage` chunks.
   * Thin wrapper that re-tags each text delta from `_legacystreamComplete`.
   */
  public async *_streamChat(
    messages: ChatMessage[],
    signal: AbortSignal,
  ): AsyncGenerator<ChatMessage> {
    for await (const content of this._legacystreamComplete(messages, signal)) {
      yield {
        role: "assistant",
        content,
      };
    }
  }
}