import { ChatMessage, CompletionOptions } from "..";
import { withExponentialBackoff } from "../utils/withExponentialBackoff";

export abstract class BaseLLM {
  /**
   * Wrapper around the global `fetch` that (a) rewrites common provider
   * error responses (Ollama, OpenAI, Mistral/Codestral) into actionable
   * messages, and (b) retries transient failures with exponential backoff
   * (5 attempts, 0.5s initial delay).
   *
   * @param url  Request target, forwarded to `fetch` unchanged.
   * @param init Optional request options, forwarded to `fetch` unchanged.
   * @returns The successful `Response`.
   * @throws Error with a human-readable explanation on non-OK responses or
   *         network failures; rethrows `AbortError` unchanged so callers can
   *         distinguish cancellation from real failures.
   */
  fetch(url: RequestInfo | URL, init?: RequestInit): Promise<Response> {
    // Custom Node.js fetch
    const customFetch = async (input: URL | RequestInfo, init: any) => {
      try {
        const resp: Response = await fetch(input, init);

        // Error mapping to be more helpful
        if (!resp.ok) {
          let text = await resp.text();
          if (resp.status === 404 && !resp.url.includes("/v1")) {
            if (text.includes("try pulling it first")) {
              // Ollama's "model not found" body is JSON like
              // {"error":"model \"name\" not found, try pulling it first"}.
              // Guard the parse: a malformed body must not replace the
              // HTTP error below with an opaque TypeError.
              try {
                const model = JSON.parse(text).error.split(" ")[1].slice(1, -1);
                text = `The model "${model}" was not found. To download it, run \`ollama run ${model}\`.`;
              } catch {
                // Body didn't match the expected shape; fall through with raw text.
              }
            } else if (text.includes("/api/chat")) {
              text =
                "The /api/chat endpoint was not found. This may mean that you are using an older version of Ollama that does not support /api/chat. Upgrading to the latest version will solve the issue.";
            } else {
              text =
                "This may mean that you forgot to add '/v1' to the end of your 'apiBase' in config.json.";
            }
          } else if (
            resp.status === 404 &&
            resp.url.includes("api.openai.com")
          ) {
            text =
              "You may need to add pre-paid credits before using the OpenAI API.";
          } else if (
            resp.status === 401 &&
            (resp.url.includes("api.mistral.ai") ||
              resp.url.includes("codestral.mistral.ai"))
          ) {
            // A 401 against the wrong Mistral host almost always means the
            // key and the endpoint were swapped; say so explicitly.
            if (resp.url.includes("codestral.mistral.ai")) {
              throw new Error(
                "You are using a Mistral API key, which is not compatible with the Codestral API. Please either obtain a Codestral API key, or use the Mistral API by setting 'apiBase' to 'https://api.mistral.ai/v1' in config.json.",
              );
            } else {
              throw new Error(
                "You are using a Codestral API key, which is not compatible with the Mistral API. Please either obtain a Mistral API key, or use the Codestral API by setting 'apiBase' to 'https://codestral.mistral.ai/v1' in config.json.",
              );
            }
          }
          throw new Error(
            `HTTP ${resp.status} ${resp.statusText} from ${resp.url}\n\n${text}`,
          );
        }

        return resp;
      } catch (e: any) {
        // Rewrite known failure modes into more specific messages before rethrowing.
        if (e.message.includes("/api/tags")) {
          throw new Error(`Error fetching tags: ${e.message}`);
        } else if (e.message.includes("/api/show")) {
          // `e.response` is not guaranteed to exist on fetch errors; use
          // optional chaining so a missing response can't mask the original error.
          throw new Error(
            `HTTP ${e.response?.status} ${e.response?.statusText} from ${e.response?.url}\n\n${e.response?.body}`,
          );
        } else {
          if (e.name !== "AbortError") {
            // Don't pollute console with abort errors. Check on name instead of instanceof, to avoid importing node-fetch here
            console.debug(
              `${e.message}\n\nCode: ${e.code}\nError number: ${e.errno}\nSyscall: ${e.erroredSysCall}\nType: ${e.type}\n\n${e.stack}`,
            );
          }
          if (
            e.code === "ECONNREFUSED" &&
            e.message.includes("http://127.0.0.1:11434")
          ) {
            // TODO
          }
        }
        if (e.name === "AbortError") {
          // Rethrow aborts unchanged: wrapping them in a plain Error would
          // hide the cancellation from callers and cause pointless retries.
          throw e;
        }
        throw new Error(e.message);
      }
    };
    return withExponentialBackoff<Response>(
      () => customFetch(url, init) as any,
      5,
      0.5,
    );
  }

  /**
   * Streams a chat completion as incremental `ChatMessage` chunks.
   * Subclasses that support chat must override this; the base
   * implementation always throws.
   *
   * @param _messages Conversation history to complete.
   * @param _signal   Abort signal to cancel the stream.
   * @throws Error always ("Not implemented") in the base class.
   */
  async *_streamChat(
    _messages: ChatMessage[],
    _signal: AbortSignal,
  ): AsyncGenerator<ChatMessage> {
    throw new Error("Not implemented");
  }

  /**
   * Legacy completion streaming that yields raw text chunks.
   * Subclasses using the legacy completions API must override this; the
   * base implementation always throws.
   *
   * @param _messages Conversation history to complete.
   * @param _signal   Abort signal to cancel the stream.
   * @throws Error always ("Method not implemented.") in the base class.
   */
  async *_legacystreamComplete(
    _messages: ChatMessage[],
    _signal: AbortSignal,
  ): AsyncGenerator<string> {
    throw new Error("Method not implemented.");
  }
}