# -*- coding: utf-8 -*-
# Copyright 2023 OpenSPG Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied.


import json
from typing import Union
from openai import OpenAI
import logging

from kag.common.llm.client.llm_client import LLMClient
from kag.common.llm.config import OpenAIConfig

# logging.basicConfig(level=logging.DEBUG)
# Module-level logger following the standard getLogger(__name__) convention.
logger = logging.getLogger(__name__)


class OpenAIClient(LLMClient):
    """
    A client class for interacting with the OpenAI API.

    Initializes the client from an OpenAIConfig carrying the API key, base URL,
    streaming option, sampling temperature, and default model.

    Parameters:
        llm_config (OpenAIConfig): Configuration object providing api_key,
            base_url, model, stream, and temperature.

    Attributes:
        api_key (str): The OpenAI API key.
        base_url (str): The base URL of the API.
        model (str): The default model to use.
        stream (bool): Whether to process responses in a streaming manner.
        temperature (float): Sampling temperature.
        client (OpenAI): An instance of the OpenAI API client.
    """

    def __init__(
            self,
            llm_config: OpenAIConfig
    ):
        # Copy config fields onto the instance, then build the SDK client.
        self.api_key = llm_config.api_key
        self.base_url = llm_config.base_url
        self.model = llm_config.model
        self.stream = llm_config.stream
        self.temperature = llm_config.temperature
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

    def __call__(self, prompt: str, image_url: str = None):
        """
        Executes a model request when the object is called and returns the result.

        Parameters:
            prompt (str): The prompt provided to the model.
            image_url (str, optional): If given, the prompt is sent as a
                multimodal message that also references this image URL.

        Returns:
            str: The response content generated by the model. When streaming
            is enabled, the streamed chunks are concatenated into one string.
        """
        # Build only the user content differently for the image case; the
        # request/response handling is identical for both branches.
        if image_url:
            user_content = [
                {"type": "text", "text": prompt},
                {"type": "image_url", "image_url": {"url": image_url}},
            ]
        else:
            user_content = prompt
        messages = [
            {"role": "system", "content": "you are a helpful assistant"},
            {"role": "user", "content": user_content},
        ]
        response = self.client.chat.completions.create(
            model=self.model,
            messages=messages,
            stream=self.stream,
            temperature=self.temperature,
        )
        if self.stream:
            # A streaming response is an iterator of chunks, not a completion
            # object; previously this path raised AttributeError on
            # `response.choices[0].message`. Aggregate the delta content.
            return "".join(
                chunk.choices[0].delta.content or ""
                for chunk in response
                if chunk.choices
            )
        return response.choices[0].message.content

    def call_with_json_parse(self, prompt):
        """
        Calls the model and attempts to parse the response into JSON format.

        Strips a Markdown ```json ... ``` fence if present, then parses.

        Parameters:
            prompt (str): The prompt provided to the model.

        Returns:
            Union[dict, str]: If the response is valid JSON, returns the parsed
            object; otherwise, returns the original response string.
        """
        rsp = self(prompt)
        _start = rsp.find("```json")
        _end = rsp.rfind("```")
        # Require the closing fence to come after the opening one so an
        # unterminated ```json fence falls back to parsing the raw response.
        if _start != -1 and _end != -1 and _end > _start:
            json_str = rsp[_start + len("```json"): _end].strip()
        else:
            json_str = rsp
        try:
            json_result = json.loads(json_str)
        except json.JSONDecodeError:
            # Not valid JSON — return the raw model output unchanged.
            return rsp
        return json_result