import pandas as pd
import numpy as np
from agent.llm_agent import LLMAgent
from langchain import FAISS, OpenAI
import asyncio
import openai
import json
import time
from concurrent.futures import ThreadPoolExecutor



import json
import tiktoken # for token counting
import numpy as np
from collections import defaultdict
import datetime
# format={"messages": [{"role": "system", "content": "You are a happy assistant that puts a positive spin on everything."}, {"role": "user", "content": "I fell off my bike today."}, {"role": "assistant", "content": "It's great that you're getting exercise outdoors!"}]}


import os
# SECURITY(review): a live-looking OpenAI API key is hardcoded and committed
# below — treat it as leaked: revoke/rotate it and load the key from the
# deployment environment (or a secrets manager) instead of source control.
os.environ['OPENAI_API_KEY'] = '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' # fill me in



import pymysql

def get_conn():
    """Open a PyMySQL connection to the offline_gpt database.

    Connection settings may be overridden through environment variables
    (OFFLINE_GPT_DB_HOST/PORT/USER/PASSWORD/NAME); the previous hardcoded
    values are kept as defaults so existing callers behave identically.

    SECURITY(review): the default credentials below are committed in source
    control — rotate them and supply real values via the environment.

    Returns:
        pymysql.connections.Connection: an open connection (internal network).
    """
    conn = pymysql.connect(
        host=os.environ.get('OFFLINE_GPT_DB_HOST', 'lingxiai.rwlb.zhangbei.rds.aliyuncs.com'),
        port=int(os.environ.get('OFFLINE_GPT_DB_PORT', '3306')),
        user=os.environ.get('OFFLINE_GPT_DB_USER', 'offline_gpt'),
        passwd=os.environ.get('OFFLINE_GPT_DB_PASSWORD', "Moxi123#"),
        database=os.environ.get('OFFLINE_GPT_DB_NAME', "offline_gpt"),
    )
    return conn

def read_data(data_sql,conn):
    """Execute ``data_sql`` on ``conn`` and return the rows as a DataFrame.

    Best-effort: any read failure is reported through ``send_wechat_warning``
    and swallowed, and the caller receives an empty DataFrame instead of an
    exception. The elapsed wall-clock time of the read is printed either way.

    Args:
        data_sql: SQL SELECT statement to execute.
        conn: an open DBAPI connection (see ``get_conn``).

    Returns:
        pandas.DataFrame: query result, or an empty frame on error.
    """
    t0 = time.time()
    try:
        frame = pd.read_sql(data_sql, conn)
    except Exception as exc:
        # deliberate best-effort: alert ops via WeChat, return empty frame
        warn_msg = "new_renren_label_recognize read mysql error:{}".format(exc)
        send_wechat_warning(content=warn_msg, window='renren_label', level='new_renren_label_recognize')
        frame = pd.DataFrame()
    print('pd.read mysql use time:', time.time() - t0)
    return frame


def push_result_to_mysql(result_dict):
    """Insert one voice-text recognition result into result_voice_text_result.

    Fix: the original interpolated values into the SQL string with
    ``str.format``, which breaks on quotes inside ``voice_text`` and is
    SQL-injectable; we now use a parameterized query. Commits on success,
    rolls back and prints the error on failure (best-effort, as before);
    the cursor and connection are always closed.

    Args:
        result_dict: dict with keys case_id, company_id, customer_id,
            session_id, voice_text, call_start_time, res_result1..4,
            res_result. create_time is filled with the current local time.
    """
    insert_sql = (
        "INSERT INTO offline_gpt.result_voice_text_result"
        "(case_id,company_id,customer_id,session_id,voice_text,call_start_time,"
        "res_result1,res_result2,res_result3,res_result4,res_result,create_time) "
        "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
    )
    params = (
        result_dict['case_id'], result_dict['company_id'],
        result_dict['customer_id'], result_dict['session_id'],
        result_dict['voice_text'], result_dict['call_start_time'],
        result_dict['res_result1'], result_dict['res_result2'],
        result_dict['res_result3'], result_dict['res_result4'],
        result_dict['res_result'], datetime.datetime.now(),
    )
    conn = get_conn()
    try:
        cursor = conn.cursor()
        try:
            cursor.execute(insert_sql, params)
            conn.commit()
            print("{}: insert success ".format(result_dict['session_id']))
        except Exception as e:
            print("{}:{}".format(result_dict['session_id'], e))
            # roll back the failed insert so the connection stays clean
            conn.rollback()
        finally:
            cursor.close()
    finally:
        conn.close()

   

def format_check(data_path=''):
    """Validate a chat fine-tuning JSONL file and print a format-error report.

    Each line must be a JSON object holding a non-empty "messages" list.
    Every message needs "role" and "content" keys, only recognized keys,
    a recognized role, and string content (unless it carries a
    function_call). Each example must contain at least one assistant
    message. Violation counts are printed; a clean file prints
    "No errors found".

    Args:
        data_path: path to the JSONL dataset to check.
    """
    with open(data_path, 'r', encoding='utf-8') as fh:
        examples = [json.loads(row) for row in fh]

    errors = defaultdict(int)
    allowed_keys = ("role", "content", "name", "function_call")
    allowed_roles = ("system", "user", "assistant", "function")

    for example in examples:
        if not isinstance(example, dict):
            errors["data_type"] += 1
            continue

        msgs = example.get("messages", None)
        if not msgs:
            errors["missing_messages_list"] += 1
            continue

        for msg in msgs:
            if "role" not in msg or "content" not in msg:
                errors["message_missing_key"] += 1
            if any(key not in allowed_keys for key in msg):
                errors["message_unrecognized_key"] += 1
            if msg.get("role", None) not in allowed_roles:
                errors["unrecognized_role"] += 1

            body = msg.get("content", None)
            fn_call = msg.get("function_call", None)
            if (not body and not fn_call) or not isinstance(body, str):
                errors["missing_content"] += 1

        if not any(msg.get("role", None) == "assistant" for msg in msgs):
            errors["example_missing_assistant_message"] += 1

    if errors:
        print("Found errors:")
        for name, count in errors.items():
            print(f"{name}: {count}")
    else:
        print("No errors found")


def write_to_jsonl(samples,file_path=''):
    """Append each sample as one UTF-8 JSON line to ``file_path``.

    Args:
        samples: iterable of JSON-serializable objects.
        file_path: target .jsonl file; created if missing, appended otherwise.
    """
    rows = [json.dumps(sample, ensure_ascii=False) + "\n" for sample in samples]
    with open(file_path, 'a', encoding='utf-8') as out:
        out.writelines(rows)


def token_count(content):
    """Return the number of cl100k_base (GPT-3.5/4) tokens in ``content``."""
    enc = tiktoken.get_encoding("cl100k_base")
    return len(enc.encode(content))


def split_content(content,every_content_length=2500,window_dialogue=2):
    """Split a dialogue transcript into token-bounded chunks.

    If the whole text fits in ``every_content_length`` tokens it is returned
    as a single chunk. Otherwise the text is split on newlines and packed
    greedily into chunks; consecutive chunks overlap by the last
    ``window_dialogue`` lines of the previous chunk for context. As in the
    original, a chunk may exceed the budget by the line that pushed it over.

    Fix vs original: the chunk count was pre-computed from the total token
    count, but the overlap lines consume part of each chunk's budget, so
    trailing dialogue lines could be silently dropped (and an overlap wider
    than the budget produced degenerate chunks). We now loop until every
    line has been consumed, always taking at least one new line per chunk.

    Args:
        content: newline-separated dialogue text.
        every_content_length: soft token budget per chunk.
        window_dialogue: number of trailing lines carried into the next chunk.

    Returns:
        list[str]: the chunks, in order.
    """
    if token_count(content) <= every_content_length:
        return [content]
    dialogue_list = content.split('\n')
    res_content_list = []
    res_content = []
    while dialogue_list:
        # carry the tail of the previous chunk over as context overlap
        res_content = res_content[-window_dialogue:] if res_content_list else []
        # always consume at least one new line so the loop must terminate
        res_content.append(dialogue_list.pop(0))
        while dialogue_list and token_count('\n'.join(res_content)) <= every_content_length:
            res_content.append(dialogue_list.pop(0))
        res_content_list.append('\n'.join(res_content))
    return res_content_list

def merge_result(dict_list):
    """Merge per-chunk label dicts into one result dict.

    Keys seen only in later dicts are added as-is. For a key already
    present, its value is promoted to '是' (yes) when a later dict answers
    '是' while the current value is '否' (no) or '不确定' (uncertain);
    otherwise the earlier value wins.

    Fixes vs original: returns {} for an empty list (was IndexError),
    no longer mutates ``dict_list[0]`` in place, and tolerates later dicts
    missing keys present in earlier ones (was KeyError).

    Args:
        dict_list: list of label dicts, one per content chunk.

    Returns:
        dict: the merged labels.
    """
    if not dict_list:
        return {}
    res_dict = dict(dict_list[0])  # copy so the caller's dict is untouched
    for dic in dict_list[1:]:
        for k, v in dic.items():
            if k not in res_dict:
                res_dict[k] = v
            elif res_dict[k] in ('否', '不确定') and v == '是':
                res_dict[k] = '是'
    return res_dict



class AsyncExecutor:
    """Fan blocking DB inserts out to a thread pool from asyncio code.

    ``push_result_to_mysql`` blocks on network I/O, so each call is shipped
    to a ThreadPoolExecutor via ``run_in_executor`` and the resulting
    futures are awaited concurrently.
    """

    def __init__(self, max_workers=10):
        # shared pool; worker count bounds concurrent DB connections
        self.executor = ThreadPoolExecutor(max_workers)

    async def async_exec(self, result):
        """Run ``push_result_to_mysql(result)`` in the pool and await it."""
        # get_running_loop() is the supported call inside a coroutine;
        # get_event_loop() here is deprecated since Python 3.10.
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(self.executor, push_result_to_mysql, result)

    async def main(self, results):
        """Insert every result concurrently and wait for all to finish."""
        # gather() wraps each coroutine in a task and awaits them all
        await asyncio.gather(*(self.async_exec(result) for result in results))

    def shutdown(self, wait=True):
        """Release the pool's worker threads (new, optional helper)."""
        self.executor.shutdown(wait=wait)

