const { OpenAI: BaseOpenAI } = require('openai');
const { Message, Prompt, Response } = require('./entities');

/**
 * Builds the metadata object attached to every log payload.
 *
 * Node.js has no equivalent of Python's `threading.get_ident()`, so only the
 * process id is recorded.
 *
 * @returns {{process: number, timestamp: number}} current pid and the Unix
 *   epoch timestamp in milliseconds.
 */
function generateMetadataDict() {
    return {
        process: process.pid,
        timestamp: Date.now(), // idiomatic form of `new Date().getTime()`
    };
}

/**
 * POSTs a prompt/response pair to the Speck logging endpoint.
 *
 * Best-effort telemetry: failures are logged to stderr and swallowed so that
 * a logging outage never breaks the wrapped OpenAI call.
 *
 * @param {Object} args
 * @param {Object} args.config - call configuration; `config.model` is read for the payload.
 * @param {Object} args.prompt - Prompt entity; must implement `toDict()`.
 * @param {Object} args.response - Response entity; must implement `toDict()`.
 * @param {Object} [kwargs] - reserved for future use (currently unused).
 * @returns {Promise<Object|undefined>} parsed JSON response from the logging
 *   service, or `undefined` if the request failed.
 */
async function universalFormatLog({ config, prompt, response }, kwargs = {}) {
    // SECURITY(review): API key is hard-coded in source. Move it to an
    // environment variable / secret store and rotate this key.
    const logConfig = {
        apiKey: "099435f0300cc3bcf58e893361e1c07013c80c117087bc79b9ff613ee51af3f7083d654da19ac7b843465f2a79d4202d",
        endpoint: "https://api.getspeck.ai",
    };

    const body = {
        input: {
            provider: "openai",
            model: config.model,
            ...prompt.toDict(),
        },
        output: response.toDict(),
        metadata: generateMetadataDict(),
    };

    try {
        // `res` (not `response`) to avoid shadowing the destructured parameter.
        const res = await fetch(`${logConfig.endpoint}/logging/create/llm`, {
            method: 'POST',
            headers: {
                'X-API-Key': logConfig.apiKey,
                // Declare the JSON body explicitly; servers may reject untyped POSTs.
                'Content-Type': 'application/json',
            },
            body: JSON.stringify(body),
        });

        if (!res.ok) {
            throw new Error('HTTP error, status = ' + res.status);
        }
        return await res.json();
    } catch (error) {
        // Log the full error (message + stack), then swallow: telemetry is
        // fire-and-forget and must not propagate failures to the caller.
        console.error('universalFormatLog failed:', error);
    }
}

/**
 * Drop-in replacement for the OpenAI client that transparently logs every
 * `chat.completions.create` call to the Speck logging service before
 * returning the untouched provider response.
 */
class OpenAI extends BaseOpenAI {
    constructor(...args) {
        super(...args);

        // Keep a reference to the original implementation so we can delegate.
        const originalChatCompletionsCreate = this.chat.completions.create;

        // Replace it with a logging wrapper (arrow fn preserves `this`).
        this.chat.completions.create = async (kwargs) => {
            const { messages, model } = kwargs;
            const response = await originalChatCompletionsCreate.apply(this.chat.completions, [kwargs]);

            // BUG FIX: chat-completion text lives at choices[0].message.content,
            // not choices[0].content (the old path always yielded undefined).
            // NOTE(review): assumes a non-streaming response; a streaming call
            // (kwargs.stream) has no `choices`/`usage` here — confirm callers.
            const parsedResponse = new Response(
                response.choices[0].message.content,
                response.usage.prompt_tokens,
                response.usage.completion_tokens,
                response,
            );

            // BUG FIX: expose `model` at the top level so the logger's
            // `config.model` read is defined. Fire-and-forget with an explicit
            // rejection handler so the promise never floats unhandled.
            universalFormatLog({
                config: { model, kwargs },
                prompt: new Prompt(messages),
                response: parsedResponse,
            }, {}).catch((err) => console.error('speck logging failed:', err));

            // Always hand back the provider's original response untouched.
            return response;
        };
    }
}

// Public API: the logging-wrapped OpenAI client plus the entity helpers
// (re-exported from ./entities) that its payloads are built from.
module.exports = { OpenAI, Message, Prompt, Response };
