import { BaseLLMParams, LLM } from "@langchain/core/language_models/llms";

/**
 * Input parameters accepted by {@link NIBittensorLLM}.
 */
export interface BittensorInput extends BaseLLMParams {
  /** System prompt sent as the first chat message; a default is used when omitted. */
  systemPrompt?: string | null | undefined;
  /** Number of top miner responses to request; values above 100 are clamped in `_call`. */
  topResponses?: number | undefined;
}
/** A single chat message in the `role`/`content` shape sent to the chat endpoint. */
interface Message {
  role: string;
  content: string;
}

/** JSON body POSTed to the `/chat` endpoint. */
interface ChatPayload {
  /** Specific miner UIDs to query (used when querying one miner at a time). */
  uids?: string[];
  /** Number of top miners to query in a single request. */
  top_n?: number;
  messages: Message[];
}

/** Shape of one entry returned by the API-key listing endpoint. */
interface APIKeyResponse {
  api_key: string;
}

/** Parsed shape of a successful `/chat` response. */
interface ChatResponse {
  choices?: { message: Message }[];
}
/**
 * Class representing the Neural Internet language model powered by Bittensor, a decentralized network
 * full of different AI models.
 * To analyze API_KEYS and logs of your usage visit
 *      https://api.neuralinternet.ai/api-keys
 *      https://api.neuralinternet.ai/logs
 */
export class NIBittensorLLM extends LLM implements BittensorInput {
  static lc_name(): string {
    return "NIBittensorLLM";
  }

  /** System prompt sent as the first message of every chat request. */
  systemPrompt: string;

  /** Requested number of top miner responses; clamped to 100 at call time. */
  topResponses: number | undefined;

  constructor(fields?: BittensorInput) {
    super(fields ?? {});
    this.systemPrompt =
      fields?.systemPrompt ??
      "You are an assistant which is created by Neural Internet(NI) in decentralized network named as a Bittensor. Your task is to provide accurate response based on user prompt";
    this.topResponses = fields?.topResponses;
  }

  _llmType(): string {
    return "NIBittensorLLM";
  }

  /**
   * Call out to NIBittensorLLM's complete endpoint.
   *
   * When `topResponses` is unset, each top miner UID is queried one at a
   * time until one yields a usable response; if none does, a single request
   * for the top 10 miners is made and its first choice is returned. When
   * `topResponses` is set, the raw response payload for that many miners is
   * returned as a string.
   *
   * @param prompt The prompt to pass into the model.
   * @returns The string generated by the model, or a fixed fallback message
   *   when any request fails.
   *
   * @example
   * const response = await niBittensorLLM.call("Tell me a joke.");
   */
  async _call(prompt: string): Promise<string> {
    try {
      // Retrieve an API key for the subsequent requests.
      const apiKeyResponse = await fetch(
        "https://test.neuralinternet.ai/admin/api-keys/"
      );
      if (!apiKeyResponse.ok) {
        throw new Error("Network response was not ok");
      }
      const apiKeysData: APIKeyResponse[] = await apiKeyResponse.json();
      const apiKey: string = apiKeysData[0].api_key;

      const headers = {
        "Content-Type": "application/json",
        Authorization: `Bearer ${apiKey}`,
        "Endpoint-Version": "2023-05-19",
      };

      // Clamp to the API maximum of 100; 0 means "not requested".
      // BUG FIX: use a local value instead of mutating `this.topResponses`,
      // so repeated calls on the same instance behave identically (the old
      // code permanently overwrote the field to 10 on the first call, which
      // also made the `choices`-parsing branch below unreachable).
      const topN =
        this.topResponses === undefined ? 0 : Math.min(this.topResponses, 100);

      const minerResponse = await fetch(
        "https://test.neuralinternet.ai/top_miner_uids",
        { headers }
      );
      if (!minerResponse.ok) {
        throw new Error("Network response was not ok");
      }
      const uids: string[] = await minerResponse.json();

      // Without an explicit topResponses, try each top miner individually
      // until one of them returns a response.
      if (Array.isArray(uids) && uids.length && topN === 0) {
        for (const uid of uids) {
          try {
            const payload: ChatPayload = {
              uids: [uid],
              messages: [
                { role: "system", content: this.systemPrompt },
                { role: "user", content: prompt },
              ],
            };

            const response = await fetch(
              "https://test.neuralinternet.ai/chat",
              {
                method: "POST",
                headers,
                body: JSON.stringify(payload),
              }
            );

            if (!response.ok) {
              throw new Error("Network response was not ok");
            }

            const chatData: ChatResponse = await response.json();

            if (chatData.choices) {
              return chatData.choices[0].message.content;
            }
          } catch {
            // This miner failed — move on to the next one.
            continue;
          }
        }
      }

      // Single request for the top miners (default 10 when not requested).
      const payload: ChatPayload = {
        top_n: topN === 0 ? 10 : topN,
        messages: [
          { role: "system", content: this.systemPrompt },
          { role: "user", content: prompt },
        ],
      };

      const response = await fetch("https://test.neuralinternet.ai/chat", {
        method: "POST",
        headers,
        body: JSON.stringify(payload),
      });

      if (!response.ok) {
        throw new Error("Network response was not ok");
      }

      const responseData: ChatResponse | string = await response.json();

      if (topN !== 0) {
        // The caller explicitly asked for multiple top responses — return
        // the raw payload as-is.
        return responseData as string;
      }
      const { choices } = responseData as ChatResponse;
      if (choices) {
        return choices[0].message.content;
      }
    } catch {
      return "Sorry I am unable to provide response now, Please try again later.";
    }
    return "default";
  }

  identifyingParams(): {
    systemPrompt: string | null | undefined;
    topResponses: number | undefined;
  } {
    return {
      systemPrompt: this.systemPrompt,
      topResponses: this.topResponses,
    };
  }
}
