//SPDX-FileCopyrightText: Copyright 2025-2025 深圳市同心圆网络有限公司
//SPDX-License-Identifier: GPL-3.0-only

import { type Channel, invoke } from "@tauri-apps/api/core";
import { fetch } from '@tauri-apps/plugin-http';

// API response data types.
/** Error payload returned by the LLM API inside a response envelope. */
export interface ApiError {
    id: string;
    message: string;
    type: string;
    code: string;
}

/** One model entry from the `/v1/models` listing (OpenAI-compatible shape). */
export interface LlmModelInfo {
    id: string;
    created: number; // creation timestamp — presumably Unix seconds; TODO confirm against the API
    owned_by: string;
}

/**
 * Top-level response envelope for the models endpoint.
 * Callers check `error` first, then fall back to `data` (see `list_llm_model`).
 */
export interface ApiResponse {
    error?: ApiError;
    data?: LlmModelInfo[] | unknown;
    object?: "list" | "object",
}

// Chat message data types.
/** Plain-text content part of a chat message. */
export interface ChatContentText {
    type: "text",
    text: string;
}

/** Image content part of a chat message, referenced by URL. */
export interface ChatContentImage {
    type: "image_url",
    image_url: {
        url: string;
    },
}

/** Message content: a typed content part, or a bare string for simple text. */
export type ChatContent = ChatContentText | ChatContentImage | string;

/** A single chat message: role (e.g. "user"/"assistant") plus content. */
export interface ChatMsg {
    role: string;
    content: ChatContent;
}

/** Streaming chunk delivered over the Tauri event channel in `chat_completion`. */
export interface ChatMsgChunkData {
    success: boolean;
    value: string;
}

/** One SSE chunk of a streaming chat completion (OpenAI-compatible shape). */
export interface ChatChunkResponse {
    id: string;
    object: string;
    choices: {
        index: number;
        delta: {
            role: string;
            content: ChatContent;
        };
        // null while streaming; a reason string (e.g. "stop") on the final chunk — TODO confirm values
        finish_reason: null | string;
    }[];
}

/**
 * Fetch the available model list from an OpenAI-compatible endpoint
 * (`GET {apiAddr}/v1/models`).
 *
 * @param apiAddr - Base URL of the API server (without the `/v1` suffix).
 * @param apiKey - Bearer token; the Authorization header is omitted when empty.
 * @returns The server's `ApiError` when the response carries one, the model
 *          list on success, or `[]` when the response is unusable
 *          (non-200 status or an envelope with neither `error` nor `data`).
 */
export async function list_llm_model(apiAddr: string, apiKey: string): Promise<ApiError | LlmModelInfo[]> {
    // Empty Origin works around 403 responses caused by the http plugin sending an unsafe header.
    const headers: [string, string][] = [["Origin", ""]];
    if (apiKey !== "") {
        headers.push(["Authorization", `Bearer ${apiKey}`]);
    }
    const res = await fetch(`${apiAddr}/v1/models`, {
        method: "GET",
        headers: headers,
    });

    if (res.status === 200) {
        const apiRes = (await res.json()) as ApiResponse;
        if (apiRes.error !== undefined) {
            return apiRes.error;
        }
        if (apiRes.data !== undefined) {
            return apiRes.data as LlmModelInfo[];
        }
    } else {
        // Surface the failure on the error channel with context; callers just see [].
        console.error(`list_llm_model: HTTP ${res.status}`, await res.text());
    }
    return [];
}

/**
 * Start a streaming chat completion by delegating to the Tauri backend
 * command `llm_cmds_chat_completion`; response chunks are delivered to
 * the caller through `onEvent`.
 *
 * @param defaultAddr - Base URL of the API server (without the `/v1` suffix).
 * @param apiKey - Bearer token forwarded to the backend.
 * @param model - Model identifier to run the completion with.
 * @param msgs - Conversation history sent as the `messages` array.
 * @param onEvent - Channel receiving streamed chunks from the backend.
 */
export async function chat_completion(defaultAddr: string, apiKey: string, model: string, msgs: ChatMsg[], onEvent: Channel<ChatMsgChunkData>) {
    // Serialize the OpenAI-compatible request payload up front; the Rust
    // side forwards it verbatim as the request body.
    const requestBody = JSON.stringify({
        model: model,
        messages: msgs,
        stream: true,
    });
    await invoke("llm_cmds_chat_completion", {
        addr: `${defaultAddr}/v1/chat/completions`,
        apiKey: apiKey,
        body: requestBody,
        onEvent: onEvent,
    });
}