
from openai import OpenAI
import requests
import json
class SiliconFlowLLM:
    """Thin chat client for the SiliconFlow (OpenAI-compatible) API.

    Supports two transports: the official OpenAI SDK (``chat``) and raw
    HTTP via ``requests`` (``httpchat``), both single-turn.

    NOTE(review): the constructor defaults reference ``DashscopeApi``,
    which is neither imported nor defined in this file. That name must be
    in scope before this class body executes, or instantiation with
    defaults raises ``NameError`` — confirm against the rest of the project.
    """

    def __init__(self, model=DashscopeApi.model, api_key=DashscopeApi.api_key,
                 base_url=DashscopeApi.base_url):
        """Create a client.

        Args:
            model: model identifier sent with every request.
            api_key: bearer token used for authentication.
            base_url: API root, without trailing slash (e.g.
                ``https://api.siliconflow.cn/v1``).
        """
        self.api_key = api_key
        self.base_url = base_url
        # Raw-HTTP endpoint used by httpchat(); the OpenAI SDK builds its own.
        self.base_url2 = base_url + '/chat/completions'
        self.model = model
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

    def get_models(self):
        """Return the parsed JSON listing of available models.

        NOTE(review): the URL is hard-coded rather than derived from
        ``self.base_url``, so this always queries api.siliconflow.cn even
        when the client was constructed against a different endpoint —
        confirm whether that is intentional.
        """
        url = "https://api.siliconflow.cn/v1/models"
        headers = {"Authorization": f"Bearer {self.api_key}"}
        # requests.get is the idiomatic spelling of requests.request("GET", ...)
        response = requests.get(url, headers=headers)
        return response.json()

    def start_new_LLM(self, model):
        """Return a new client sharing this one's credentials but using *model*."""
        return SiliconFlowLLM(model=model, api_key=self.api_key,
                              base_url=self.base_url)

    def chat(self, prompt, stream=True, temperature=0.1):
        """Send a single-turn *prompt* via the OpenAI SDK and return the text.

        When ``stream`` is true the delta chunks are concatenated into one
        string; otherwise the complete message content is returned directly.
        """
        response = self.client.chat.completions.create(
            model=self.model,
            messages=[
                {'role': 'user', 'content': prompt}
            ],
            temperature=temperature,
            stream=stream
        )
        if not stream:
            return response.choices[0].message.content
        pieces = []
        for chunk in response:
            delta = chunk.choices[0].delta.content
            if delta:
                pieces.append(delta)
        return ''.join(pieces)

    def httpchat(self, prompt, stream=True, temperature=0.1):
        """Send a single-turn *prompt* over raw HTTP; SSE when ``stream``.

        Streaming tokens are echoed to stdout as they arrive (including any
        ``reasoning_content`` the model emits); only ``content`` tokens are
        included in the returned string.

        Returns:
            The response text, or an ``"An error occurred: ..."`` string on
            failure (same contract as the original non-streaming branch).

        Fixes vs. the original: the streaming branch previously printed the
        tokens but returned ``None``; it now accumulates and returns the
        content. SSE lines are parsed only when they carry the ``data:``
        prefix instead of blindly slicing 6 characters off every line, and
        the stray debug print of the full completion was removed.
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}"
        }
        data = {
            "model": self.model,
            "messages": [
                {"role": "user", "content": prompt}
            ],
            "temperature": temperature,
            "stream": stream
        }
        if not stream:
            try:
                response = requests.post(self.base_url2, json=data, headers=headers)
                response.raise_for_status()
                completion = response.json()
                return completion['choices'][0]['message']['content']
            except Exception as e:
                return f"An error occurred: {e}"
        try:
            response = requests.post(self.base_url2, json=data,
                                     headers=headers, stream=True)
            response.raise_for_status()
            parts = []
            for raw_line in response.iter_lines():
                line = raw_line.decode('utf-8').strip()
                # SSE payload lines look like "data: {...}" or "data: [DONE]";
                # skip keep-alives, blanks, and the terminal sentinel.
                if not line.startswith('data:'):
                    continue
                payload = line[len('data:'):].strip()
                if not payload or payload == '[DONE]':
                    continue
                event = json.loads(payload)
                choices = event.get('choices')
                if not choices:
                    continue
                delta = choices[0].get('delta', {})
                # Reasoning tokens are echoed for visibility but deliberately
                # excluded from the returned answer text.
                if delta.get('reasoning_content'):
                    print(delta['reasoning_content'], end='')
                if delta.get('content'):
                    print(delta['content'], end='')
                    parts.append(delta['content'])
            return ''.join(parts)
        except Exception as e:
            print(e)
            return f"An error occurred: {e}"
