import { Language, RoleMessageType, RoleType } from "../enum";
import { BaseMessage } from "../message/base";
import { ChatMessage } from "../message/chatMessage";
import { SystemMessage } from "../message/systemMessage";
import { ModelBackend, ModelBackendFactory, ModelBackendConfig } from "../model";
import { personConfigs } from "../config";
import { BaseAgent } from "./base";

/** Configuration for constructing a {@link ChatAgent}. */
export interface ChatAgentConfig {
    /** System message carrying the role name, role type, and persona information. */
    systemMessage: SystemMessage;
    /** Memory store; shape unspecified — ChatAgent.useMemory currently throws when set. */
    memory?: any;
    /** Model backend configuration; falls back to the default config when omitted. */
    modelConfig?: ModelBackendConfig;
    /**
     * Maximum number of prior messages to include (the SystemMessage counts as one).
     * NOTE(review): this doc originally said the default is 1, but the ChatAgent
     * constructor falls back to 2 — confirm which is intended.
     */
    messageWindowSize?: number;
    /** Language the model should use; defaults to Chinese. */
    language?: Language;
}

export interface RawChatAgentConfig extends Omit<ChatAgentConfig, "systemMessage"> {}

/** Per-step metadata returned alongside a {@link ChatAgentResponse}. */
interface ChatAgentResponseInfo {
    /** Token count of this round's messages. */
    tokenNumber: number;
    /** Reasons for termination (empty when the step succeeded). */
    terminationReasons: string[];
}

/** Result of a single {@link ChatAgent} step. */
export class ChatAgentResponse {
    /** Messages produced (or echoed back) by this step. */
    public messages: BaseMessage[];
    /** Whether the agent has terminated. */
    public terminated: boolean;
    /** Metadata about this step (token count, termination reasons). */
    public info: ChatAgentResponseInfo;

    constructor(messages: ChatMessage[], terminated: boolean, info: ChatAgentResponseInfo) {
        this.info = info;
        this.terminated = terminated;
        this.messages = messages;
    }
}

/**
 * An LLM-backed conversational agent that keeps an alternating
 * send/receive message history seeded by a system (persona) message.
 *
 * Usage: construct, call initMessages()/reset() to seed the history,
 * then drive the conversation with step().
 */
export class ChatAgent implements BaseAgent {
    public roleName: string;
    public roleType: RoleType;
    public systemMessage: SystemMessage;
    public memory: any;
    public modelConfig: ModelBackendConfig;
    public messageWindowSize: number;
    public language: Language;

    /** Model backend used for inference. */
    public modelBackend: ModelBackend;
    /** Token limit reported by the model backend. */
    public modelTokenLimit: number;
    /** Whether the agent has terminated. */
    public terminated: boolean = false;
    /**
     * Stored messages.
     *
     * The first element is the {@link SystemMessage} carrying the persona;
     * later elements are {@link ChatMessage}s that strictly alternate between
     * messages the agent receives ({@link ReceivingChatMessage}) and messages
     * it sends ({@link SendingChatMessage}) — enforced by {@link updateMessage}.
     *
     * NOTE(review): typed ChatMessage[] but also holds the SystemMessage —
     * confirm SystemMessage is assignable to ChatMessage.
     */
    public storedMessages: ChatMessage[] = [];

    constructor(config: ChatAgentConfig) {
        this.roleName = config.systemMessage.roleName;
        this.roleType = config.systemMessage.roleType;
        this.systemMessage = config.systemMessage;
        this.memory = config.memory;
        this.modelConfig = config.modelConfig ?? personConfigs.defaultModelBackendConfig;
        // NOTE(review): ChatAgentConfig's doc says the default window is 1,
        // but 2 is used here — confirm which is intended.
        this.messageWindowSize = config.messageWindowSize ?? 2;
        this.language = config.language ?? Language.Chinese;

        this.modelBackend = ModelBackendFactory.create(this.modelConfig);
        this.modelTokenLimit = this.modelBackend.getTokenLimit();
    }

    /** Seeds the history with the system message. */
    initMessages() {
        this.storedMessages = [this.systemMessage];
    }
    /**
     * Appends a message to the history, enforcing the conversation protocol.
     *
     * @returns the (mutated) stored message array
     * @throws Error when the history was never initialized, when the first
     *         chat message is not an agent-receiving message, or when two
     *         consecutive messages share the same role message type.
     */
    updateMessage(message: ChatMessage) {
        // Fix: the constructor leaves storedMessages empty, so calling this
        // before initMessages()/reset() used to crash with an opaque
        // TypeError (`undefined.roleMessageType`); fail fast instead.
        if (this.storedMessages.length === 0) {
            throw new Error("Messages not initialized: call initMessages() or reset() first");
        }
        if (this.storedMessages.length === 1) {
            if (message.roleMessageType !== RoleMessageType.AgentReceiving) {
                throw new Error(`First message must be user message: ${message.roleMessageType}`);
            }
        }
        const latest = this.storedMessages[this.storedMessages.length - 1];
        if (latest.roleMessageType === message.roleMessageType) {
            throw new Error(`Duplicate message type: ${message.roleMessageType}`);
        }
        this.storedMessages.push(message);
        return this.storedMessages;
    }
    /**
     * Retrieves memory-derived context for the given input message.
     * Memory is currently unsupported: returns [] when no memory is
     * configured, otherwise throws.
     */
    useMemory(inputMessage: ChatMessage, memoryDepth: number = 1): BaseMessage[] {
        if (!this.memory) {
            return [];
        }
        throw new Error("Memory not supported.");
    }
    /** Clears the terminated flag and re-seeds the history with the system message. */
    reset(): ChatMessage[] {
        this.terminated = false;
        this.initMessages();
        return this.storedMessages;
    }
    /**
     * Runs one conversation turn: records the input message, trims the
     * history to the message window, and asks the model backend for a reply.
     *
     * @returns a response whose `terminated` flag is set when the token limit
     *          is exceeded, the model returns no output, or the backend throws.
     */
    async step(inputMessage: ChatMessage): Promise<ChatAgentResponse> {
        const inputMessages = this.updateMessage(inputMessage);
        if (inputMessages.length > this.messageWindowSize) {
            // Drop the oldest messages down to the window size — the extra
            // `% 2 || 1` term keeps the send/receive alternation parity —
            // and re-insert the system message at the front.
            inputMessages.splice(
                0,
                inputMessages.length -
                    this.messageWindowSize +
                    ((inputMessages.length - this.messageWindowSize) % 2 || 1),
                this.systemMessage
            );
        }

        const tokenNumber = this.modelBackend.getTokenNumberFromMessage(inputMessages);

        if (tokenNumber > this.modelTokenLimit) {
            this.terminated = true;
            const info = this.getInfo(tokenNumber, ["max_tokens_exceeded"]);
            return new ChatAgentResponse(inputMessages, true, info);
        }
        try {
            const outputMessage = await this.modelBackend.run(inputMessages);
            if (!outputMessage) {
                // Model produced no reply; treat as a terminal condition.
                this.terminated = true;
                const info = this.getInfo(tokenNumber, ["no_response"]);
                return new ChatAgentResponse(inputMessages, true, info);
            }
            const outputMessages = this.updateMessage(outputMessage);
            return new ChatAgentResponse(outputMessages, false, this.getInfo(tokenNumber, []));
        } catch (error) {
            this.terminated = true;
            const reason = error instanceof Error ? error.message : "unknown_error";
            return new ChatAgentResponse(inputMessages, true, this.getInfo(tokenNumber, [reason]));
        }
    }
    /** Builds the per-step info payload. */
    getInfo(tokenNumber: number, terminationReasons: string[]): ChatAgentResponseInfo {
        return {
            tokenNumber,
            terminationReasons
        };
    }
    /**
     * Returns the most recent stored message.
     * NOTE(review): returns undefined when the history is empty despite the
     * declared return type — callers should initialize the history first.
     */
    getLatestMessage(): ChatMessage {
        return this.storedMessages[this.storedMessages.length - 1];
    }
    /** Checks whether the given text exceeds the backend's token limit. */
    isExceedTokenLimit(message: string): boolean {
        const tokenLimit = this.modelBackend.getTokenLimit();
        // NOTE(review): step() passes a message array to
        // getTokenNumberFromMessage while a plain string is passed here —
        // presumably the backend accepts both; confirm.
        const tokenCount = this.modelBackend.getTokenNumberFromMessage(message);
        return tokenCount > tokenLimit;
    }
}
