import { makeAutoObservable } from 'mobx';
import { get } from '@app/utils/http';
import ws from '@app/utils/websocket';
import { safeParse } from '@app/utils/function';
import * as ngqlDoc from '@app/utils/ngql';
import schema from './schema';
import rootStore from '.';

export const matchPrompt = `请你担任Graph数据库助手。
现有以下文档资料：
----
请仅使用Schema中提供的关系类型和属性。
不要使用任何未在Schema中定义的关系类型或属性。
Schema结构：
---
{schema}
---
注意：NebulaGraph使用与标准Cypher不同的语法规则：
1. 使用双等号进行比较：== 而不是 =
2. 引用节点属性时需要显式指定标签，例如：
v是节点的变量，已知其标签为Foo，正确的写法是v.foo.name
而v.name是错误的写法。
标准Cypher与NebulaGraph语法差异示例：
diff
< MATCH (p:person)-[:directed]->(m:movie) WHERE m.name = 'The Godfather'
< RETURN p.name;
---
> MATCH (p:person)-[:directed]->(m:movie) WHERE m.movie.name == 'The Godfather'
> RETURN p.person.name;
---
请使用用户提问的语言回答`;

export const docFinderPrompt = `任务是从以下类别中识别出2个最相关的分类：
\`\`\`categories
{category_string}
\`\`\`
用于回答关于图数据库NGQL查询的问题："{query_str}"，用户历史提问为："{history_str}"。
只需返回逗号分隔的列表如"分类1,分类2"，不需要解释`;

export const text2queryPrompt = `假设你是Graph数据库AI助手，你的职责是帮助用户编写NGQL查询。已知以下信息：
用户图空间Schema：
----
{schema}
----
提供的参考文档：
----
{doc}
----
请用markdown代码块(\`\`\`ngql)包裹NGQL语句，并用用户提问的语言回答`;

export const AgentTask = `你作为Graph AI对话助手，需要帮助用户编写NGQL或解决其他问题。
你可以获取以下信息：
1. 用户当前控制台NGQL上下文：{current_ngql}
2. 用户当前图空间：{space_name}
3. 你上次的记忆内容：{memory}
4. 用户当前问题：{query_str}

你可以使用以下命令获取额外信息（这些信息将加入记忆用于后续回答）：
\\get-doc-categories 获取NebulaGraph文档分类列表
\\get-doc 分类名称 获取指定分类的文档
\\run-ngql ngql命令 执行NGQL语句
\\get-schema 获取当前图空间的Schema
\\finish 结果 完成任务并用用户提问的语言返回结果（如需返回NGQL请用\`\`\`ngql包裹）

请选择命令：`;

export interface LLMConfig {
  url: string;
  apiType: string;
  llmVersion: string;
  key: string;
  features: string[];
  maxContextLength: number;
  enableCopilot: boolean;
  enableLLM2NGQLs: boolean;
  gqlPath: string;
  model: string;
}
class LLM {
  // Raw text the user has typed into the AI input widget.
  currentInput = '';
  // Whether the assistant UI is currently open.
  open = false;
  // LLM endpoint settings; these defaults are merged over by fetchConfig().
  config = {
    maxContextLength: 4096,
    url: 'https://api.openai.com/v1/chat/completions',
    apiType: 'openai',
    model: 'gpt-3.5-turbo',
    features: ['spaceSchema', 'useConsoleNGQL'],
  } as LLMConfig;
  widget: HTMLSpanElement;
  editor: any;
  // Target query dialect used when building prompts.
  mode = 'text2ngql' as 'text2ngql' | 'text2cypher';
  // Copilot suggestions shown in the editor hint list.
  completionList: { text: string; type: string }[] = [];
  constructor() {
    // widget/editor hold DOM/editor handles; keep them out of MobX tracking.
    makeAutoObservable(this, {
      editor: false,
      widget: false,
    });
  }

  /**
   * Fetch LLM settings from the backend and merge them into `config`.
   * Resolves with the merged config, or undefined on a non-zero code.
   */
  fetchConfig() {
    return get('/api/config/llm')().then((res) => {
      if (res.code !== 0 || !res.data) return;
      const { config, ...values } = res.data.config;
      const configMap = config ? safeParse<LLMConfig>(config) : {};
      this.setConfig({
        ...configMap,
        ...values,
        maxContextLength: values.contextLengthLimit,
        gqlPath: res.data.gqlPath,
      });
      return this.config;
    });
  }

  /** Shallow-merge new settings over the current config. */
  setConfig(payload: LLMConfig) {
    this.config = { ...this.config, ...payload };
  }

  /** Shallow-merge arbitrary observable fields into the store (MobX action). */
  update(payload: any) {
    Object.assign(this, payload);
  }

  /**
   * Build a plain-text description of `space`'s schema (tag and edge types
   * with their fields) for embedding into prompts. Returns just the header
   * line when the 'spaceSchema' feature is disabled.
   */
  async getSpaceSchema(space: string) {
    const finalPrompt = `The user's current graph space is: ${space} \nschema:\n`;
    if (this.config.features.includes('spaceSchema')) {
      await schema.switchSpace(space);
      await schema.getTagList();
      await schema.getEdgeList();
      const tagList = schema.tagList;
      const edgeList = schema.edgeList;
      let nodeSchemaString = '';
      let edgeSchemaString = '';
      tagList.forEach((item) => {
        nodeSchemaString += `NodeType ${item.name} (${item.fields
          .map((field) => `${field.Field}:${field.Type}`)
          .join(' ')})\n`;
      });
      edgeList.forEach((item) => {
        // FIX: edge types were appended to nodeSchemaString while
        // edgeSchemaString stayed a permanently-empty const; accumulate
        // them here instead (output order is unchanged: tags then edges).
        edgeSchemaString += `EdgeType ${item.name} (${item.fields
          .map((field) => `${field.Field}:${field.Type}`)
          .join(' ')})\n`;
      });
      return finalPrompt + nodeSchemaString + edgeSchemaString;
    }
    return finalPrompt;
  }

  /**
   * Run the command-driven agent loop: render AgentTask with the current
   * console state, stream the model's reply, and when it emits a backslash
   * command, execute it, store the result in `memory`, and re-run. Plain
   * (non-command) replies are streamed straight to `callback`.
   */
  async getAgentPrompt(query_str: string, historyMessages: any, callback: (res: any) => void) {
    let memory = '';
    // Parse a completed model reply; execute its command and loop, or stop
    // when the model signalled completion with \finish.
    const finish = async (text: string) => {
      if (text.indexOf('\\finish') > -1) {
        return;
      }
      // Memory holds only the latest command's result, not an accumulated log.
      memory = '';
      const command = text.match(/\\([\w|-]+)(\s+([\s\S]*))?/);
      if (command) {
        const [, cmd, , args] = command;
        switch (cmd) {
          case 'get-doc-categories':
            memory += `(get-doc-categories: ${ngqlDoc.NGQLCategoryString})\n`;
            break;
          case 'get-doc':
            memory += `(get-doc : ${ngqlDoc.ngqlMap[args.toLowerCase()]?.content || 'no doc'})\n`;
            break;
          case 'run-ngql':
            // eslint-disable-next-line no-case-declarations
            const res = (await ws.runNgql({ gql: args, space: rootStore.console.currentSpace })) as any;
            memory += `(run-ngql :${JSON.stringify(res?.data?.tables)})\n`;
            break;
          case 'get-schema':
            // eslint-disable-next-line no-case-declarations
            const schema = await this.getSpaceSchema(rootStore.console.currentSpace);
            memory += `(get-schema: ${schema})\n`;
            break;
          default:
            // Unknown command: stop the loop rather than re-running blindly.
            return;
        }
        run();
      }
    };
    const run = async () => {
      let prompt = AgentTask;
      let message = '';
      prompt = prompt.replace('{current_ngql}', rootStore.console.currentGQL);
      prompt = prompt.replace('{space_name}', rootStore.console.currentSpace);
      prompt = prompt.replace('{memory}', memory || 'empty');
      prompt = prompt.replace('{query_str}', query_str);
      console.log(prompt);
      await ws.runChat({
        req: {
          stream: true,
          // NOTE(review): 20 tokens is a very tight budget for a final
          // answer — confirm this limit is intentional.
          max_tokens: 20,
          messages: [
            ...historyMessages,
            {
              role: 'user',
              content: prompt,
            },
          ],
        },
        callback: (res) => {
          // Once the buffered reply clearly isn't a command (doesn't start
          // with '\') or is a \finish answer, stream chunks to the UI.
          if ((message.length && message.indexOf('\\') !== 0) || message.indexOf('\\finish') > -1) {
            return callback(res);
          }
          if (res.message.done) {
            finish(message);
            return;
          }
          let text = '';
          // special for qwen api, qwen api will return a whole message
          if (this.config.apiType === 'qwen') {
            text = res.message.output.choices[0].message.content || '';
            if (res.message.output.choices[0].finish_reason === 'stop') {
              finish(message);
              return;
            }
            message = text;
          } else {
            // NOTE(review): comparing the message object to 'stop' looks
            // suspicious — the other branches check finish_reason; confirm
            // against the streaming API's actual payload shape.
            if (res.message.choices?.[0].message === 'stop') {
              finish(message);
              return;
            }
            text = res.message.choices[0].delta?.content || '';
            message += text;
          }
        },
      });
    };
    run();
  }

  /**
   * Build the full prompt for a user question: one LLM round-trip selects the
   * most relevant doc categories, then the chosen docs and the current space
   * schema are substituted into the prompt template. Falls back to matchPrompt
   * in 'text2cypher' mode or when no docs match.
   */
  async getDocPrompt(text: string, historyMessages: any) {
    let prompt = matchPrompt;
    if (this.mode !== 'text2cypher') {
      text = text.replaceAll('"', "'");
      const history = historyMessages
        .filter((item) => item.role === 'user')
        .map((item) => item.content)
        .join(',');
      const docPrompt = docFinderPrompt
        .replace('{category_string}', ngqlDoc.NGQLCategoryString)
        .replace('{query_str}', text)
        .replace('{history_str}', history)
        .replace('{space_name}', rootStore.console.currentSpace);
      console.log(docPrompt);
      const res = (await ws.runChat({
        req: {
          stream: false,
          max_tokens: 40,
          top_p: 0.8,
          messages: [
            {
              role: 'user',
              content: docPrompt,
            },
          ],
        },
      })) as any;
      if (res.code === 0) {
        let url = '';
        try {
          url = (res.message.choices[0].message?.content as string)?.split('\n')[0];
        } catch {
          throw new Error(JSON.stringify(res.message));
        }
        const paths = url
          .toLowerCase()
          .replaceAll('，', ',') // chinese comma
          .split(',')
          .map((path) => path.replaceAll(/\s|"|\\/g, ''));
        console.log('select doc url:', paths);
        if (paths[0] !== 'sorry') {
          let doc = ngqlDoc.ngqlMap[paths[0]]?.content ?? '';
          const doc2 = ngqlDoc.ngqlMap[paths[1]]?.content;
          if (doc2) {
            doc = doc + `\n` + doc2;
          }
          // FIX: truncation previously ran only when a second doc existed;
          // always cap the injected docs at the configured context budget.
          doc = doc.slice(0, this.config.maxContextLength).replaceAll(/\n\n\n+/g, '');
          // FIX: the raw '{doc}' placeholder used to leak into the prompt
          // verbatim when no document content was found; always substitute.
          prompt = text2queryPrompt.replace('{doc}', doc.length ? doc : 'no doc');
        }
      }
    }

    const pathname = window.location.pathname;
    const space = pathname.indexOf('schema') > -1 ? rootStore.schema.currentSpace : rootStore.console.currentSpace;
    if (!space) {
      return prompt.replace('{schema}', 'no space selected');
    }
    let schemaPrompt = await this.getSpaceSchema(space);

    if (this.config.features.includes('useConsoleNGQL')) {
      schemaPrompt += `\nuser console ngql context: ${rootStore.console.currentGQL}`;
    }
    prompt = prompt.replace('{schema}', schemaPrompt);
    console.log(prompt);
    return prompt;
  }

  // Debounce handle for copilot requests.
  timer;
  // True while a completion request is in flight.
  running = false;
  /**
   * Debounced editor hook: after 3s of idle typing, ask the LLM to guess the
   * next NGQL fragment for the current statement and publish the suggestions
   * into `completionList`.
   */
  async checkCopilotList(cm: any) {
    clearTimeout(this.timer);
    this.timer = setTimeout(async () => {
      let snippet = '';
      const cursor = cm.getCursor();
      // Only the statement after the last ';' on the current line is used.
      const line = cm.getLine(cursor.line).split(';').pop();
      // NOTE(review): cursor.ch is a column within the full editor line but
      // is compared against the last statement's length — verify behavior
      // when the line contains multiple ';'-separated statements.
      if (cursor.ch < line.length - 1) return;
      if (line.length < 3) return;
      const tokens = line.split(' ');
      const firstToken = tokens.find((item) => item.replaceAll(' ', '').length > 0);
      const hits = ngqlDoc.ngqlDoc.filter((each) => each.title.toLowerCase().indexOf(firstToken.toLowerCase()) === 0);
      let doc = '';
      if (this.mode === 'text2cypher' && firstToken.toLowerCase() === 'match') {
        doc += matchPrompt;
      } else {
        for (const item of hits) {
          if (doc.length > this.config.maxContextLength) break;
          // FIX: the entry object itself was concatenated before, which
          // stringifies as "[object Object]"; append its content instead
          // (entries expose title/content — presumably same shape as
          // ngqlMap values; confirm against @app/utils/ngql).
          doc += item.content + '\n';
        }
      }
      if (!doc) {
        return;
      }
      this.running = true;
      cm.closeHint();
      const schema = await this.getSpaceSchema(rootStore.console.currentSpace);
      const res = (await ws.runChat({
        req: {
          temperature: 1.0,
          stream: false,
          presence_penalty: 1.1,
          max_tokens: 30,
          top_p: 0.8,
          top_k: 40,
          messages: [
            {
              role: 'user',
              content: `As a NebulaGraph NGQL code autocomplete copilot, you have access to the following information: document "${doc}" and user space schema "${schema}".
               Use this information to guess the user's next NGQL code autocomplete as accurately as possible.
               Please provide your guess as a response without any prefix words.
               Don't explain anything.
               the next autocomplete text can combine with the given text.
               if you can't guess, say "Sorry",
               The user's NGQL text is: ${line}
               the most possible  2 next autocomplete text is:`,
            },
          ],
        },
      })) as any;
      if (res.code === 0) {
        snippet = res.message.choices[0].message?.content;
        console.log(snippet);
        if (snippet.indexOf('Sorry') > -1) {
          snippet = '';
        }
      }
      if (snippet) {
        // One suggestion per non-empty line of the model's reply.
        this.update({
          completionList: snippet
            .split('\n')
            .map((each) => ({
              type: 'copilot',
              text: each,
            }))
            .filter((item) => item.text !== ''),
        });
      }
      this.running = false;
    }, 3000);
  }
}

export default new LLM();
