from ..interface.protocol.model_protocol import ModelProtocol
from openai import OpenAI
import os
from dotenv import load_dotenv
load_dotenv()

class Sql_LLM(ModelProtocol):
    """LLM wrapper that generates raw SQLite SQL via the DeepSeek chat API.

    Keeps a running ``messages`` conversation history so the model can adapt
    its next SQL statement to execution feedback recorded through
    ``get_model_info``.
    """

    def __init__(self):
        # Credentials and endpoint come from the environment
        # (.env loaded at module import via load_dotenv()).
        deepseek_api_key = os.getenv("DEEPSEEK_API_KEY")
        deepseek_base_url = os.getenv("DEEPSEEK_BASE_URL")

        self.client = OpenAI(
            base_url=deepseek_base_url,
            api_key=deepseek_api_key,
        )
        # Conversation history seeded with the system prompt.
        # FIX: the original concatenated adjacent string literals with no
        # separator, so all rules ran together into one line; each rule now
        # ends with "\n". Also removed a stray "8. " left over from an
        # abandoned numbered-list format.
        self.messages = [
            {
                "role": "system",
                "content": (
                    "You are an advanced SQL generation tool specialized in SQLite.\n"
                    "❌ Never engage in conversations.\n"
                    "❌ Never explain, comment, or wrap the SQL in any rich text or formatting.\n"
                    "✅ Output must be raw SQL only, no natural language.\n"
                    "✅ All generated SQL must comply with SQLite syntax strictly.\n"
                    "✅ Fully utilize JOINs, WHERE, GROUP BY, ORDER BY, and aggregate functions if needed.\n"
                    "DO NOT generate any FOREIGN KEY constraint or reference. All SQL must avoid using FOREIGN KEY.\n"
                    "DO NOT output any rich text, formatting, markdown, backticks, quotes, emojis, or any other decorations. Output must ONLY be pure, executable SQLite SQL statements.\n"
                    "✅ You will receive 'True' or 'False' as assistant messages indicating the execution result of your last generated SQL. Adjust your next SQL generation based on that result if necessary."
                ),
            }
        ]

    def process_input(self, input_data):
        """Send *input_data* as a user message and return the model's SQL reply.

        :param input_data: natural-language request (and/or schema context)
                           describing the SQL to generate.
        :return: the raw SQL string produced by the model, or ``0`` on
                 failure (int sentinel kept for backward compatibility with
                 existing callers that compare against 0).
        """
        # Appending to history cannot fail; keep the try body minimal.
        self.messages.append({"role": "user", "content": input_data})
        try:
            completion = self.client.chat.completions.create(
                model="deepseek-chat",
                messages=self.messages,
                temperature=0.2,  # low temperature → more deterministic SQL
                top_p=0.5,
                max_tokens=4096,
                stream=False,
            )
            return completion.choices[0].message.content
        except Exception as e:
            # Best-effort: surface the error with context, do not crash
            # the caller (original behavior: print and return 0).
            print(f"Sql_LLM.process_input failed: {e}")
            return 0

    def get_model_info(self, back_data):
        """Record SQL execution feedback in the conversation history.

        Despite the name, this does NOT return model metadata (the original
        docstring was inaccurate): it appends *back_data* — e.g. the
        'True'/'False' execution result of the last generated SQL — as an
        assistant message so the next ``process_input`` call can adapt.

        :param back_data: execution result of the previously generated SQL.
        :return: None
        """
        self.messages.append({"role": "assistant", "content": back_data})
        return