import os 
import csv 
from io import StringIO

from fastapi import FastAPI
from pydantic import BaseModel
import uvicorn
from langchain.chat_models import AzureChatOpenAI
from langchain.chains import LLMChain
from langchain.prompts import ChatPromptTemplate

# Azure OpenAI connection settings. Values can be overridden via environment
# variables so deployments don't need code changes.
api_type = "azure"
api_base = os.getenv("OPENAI_API_BASE", "https://aism.openai.azure.com/")
api_version = "2023-05-15"
# SECURITY: a live API key was hard-coded here. It is already exposed in
# version control and must be rotated; once rotated, remove the fallback so
# the secret only ever comes from the environment.
api_key = os.getenv("OPENAI_API_KEY", "9f0d091886514a02b4178aa0734201db")
model_name = os.getenv("AZURE_DEPLOYMENT_NAME", "aism-gpt4-32")

# Shared Azure OpenAI chat client used by all request handlers.
# temperature=0.5: moderately deterministic output for structured CSV generation.
model = AzureChatOpenAI(
    openai_api_base=api_base,
    openai_api_version=api_version,
    deployment_name=model_name,
    openai_api_key=api_key,
    openai_api_type=api_type,
    temperature=0.5,
)

# FastAPI application instance; routes are registered via decorators below.
app = FastAPI()

def load_qa_pairs(text):
    """Parse CSV text of question/answer rows into (question, answer) tuples.

    The first row is treated as a header and skipped.

    Fixes over the previous version:
    - empty input no longer raises StopIteration (``next(reader, None)``);
    - rows with fewer than two fields (blank lines, malformed LLM output)
      are skipped instead of raising IndexError.
    """
    reader = csv.reader(StringIO(text), delimiter=',')
    next(reader, None)  # Skip the header row, if present.
    return [(row[0], row[1]) for row in reader if len(row) >= 2]

def format_qa_pairs(qa_pairs):
    """Render (question, answer) pairs as a "Q: ...\\nA: ...\\n\\n" text block.

    Uses ``str.join`` instead of repeated ``+=`` concatenation, which is
    quadratic in the worst case — identical output, linear time.
    """
    return "".join(f"Q: {q}\nA: {a}\n\n" for q, a in qa_pairs)

class Item(BaseModel):
    """Request body for ``POST /process_qa_gen``.

    ``text`` is the raw document text to be converted into Q&A pairs.
    """
    text: str

@app.post("/process_qa_gen")
async def Process_QA_Generator(item: Item):
    """
    Prompt invocation for Q&A-pair generation.

    Sends the submitted text to the LLM with a prompt asking for CSV-formatted
    question/answer pairs, then reformats the CSV into readable Q/A text.
    If the model's output is not parseable CSV, the raw output is returned
    instead of failing the request with a 500 error.
    """
    prompt = ChatPromptTemplate.from_template('你是一个语言专家,请将下面的文本内容转为问答对,且只需要输出csv格式的内容: {product}')
    chain = LLMChain(llm=model, prompt=prompt)
    output = chain.run(product=item.text)
    # The model does not always honor the "CSV only" instruction; parse
    # defensively rather than crashing on malformed or empty output.
    try:
        output = format_qa_pairs(load_qa_pairs(output))
    except (csv.Error, StopIteration, IndexError):
        pass  # Best effort: fall back to the raw model output unchanged.

    return {"text": "生成问答对如下: \n   \n"+ output}


def is_csv(output):
    """Heuristically decide whether *output* looks like comma-separated CSV.

    Returns True only when csv.Sniffer infers a comma delimiter with the
    standard double-quote quote character; any sniffing failure means False.
    """
    try:
        guessed = csv.Sniffer().sniff(output)
    except csv.Error:
        # The sniffer could not find any consistent dialect at all.
        return False
    return guessed.delimiter == ',' and guessed.quotechar == '"'
    


if __name__ == "__main__":
    # Dev entry point: serve "app" from module "api" on all interfaces,
    # port 8001, with auto-reload on code changes. NOTE(review): the 'api:app'
    # string assumes this file is named api.py — confirm against the filename.
    uvicorn.run('api:app', host="0.0.0.0", port=8001,reload=True)