from openai import OpenAI
import os, json, sys
from tqdm import tqdm

import asyncio
import time
from asyncio import Semaphore

from multiprocessing import Pool

# MAX_TASKS = 20
# sem = Semaphore(MAX_TASKS)

# Shared OpenAI client used by every worker process.
# NOTE(security): an API key was hard-coded here and is now leaked in source
# control -- rotate it. It is kept only as a fallback so existing runs keep
# working; prefer setting OPENAI_API_KEY / OPENAI_BASE_URL in the environment.
client = OpenAI(
            api_key=os.environ.get(
                "OPENAI_API_KEY",
                "sk-L8O4nOxQWZUmSwaF1b8280Bc9e954249B1Ca6f1e604eEcB2",
            ),
            base_url=os.environ.get("OPENAI_BASE_URL", 'https://api.ai-gaochao.cn/v1'),
            # Backup API endpoint: https://mtu2.mtuopenai.xyz/v1
        )



def gpt(messages) -> str:
    """Send one chat-completion request and return the raw reply text.

    Args:
        messages: dict with a ``'messages'`` key holding an OpenAI
            chat-format message list.

    Returns:
        The model's reply content, or ``''`` when the request fails for
        any reason (network error, timeout, API error) -- callers treat
        an empty string as "no response" (best-effort pipeline).
    """
    start_time = time.time()
    response = ''
    try:
        completion = client.beta.chat.completions.parse(
                            messages=messages['messages'],
                            model="gpt-4o-2024-08-06",
                            response_format={"type": "json_object"},
                            temperature=0,
                            # The OpenAI SDK takes a single timeout in seconds
                            # (float or httpx.Timeout), not a requests-style
                            # (connect, read) tuple as the original passed.
                            timeout=60,
                            )
        response = completion.choices[0].message.content
    except Exception:
        # Deliberate best-effort: swallow the failure, keep response == ''.
        pass
    print(f'耗时{time.time()-start_time}.')
    return response

def multiprocessing_gpt(messages: list):
    """Fan ``gpt`` out over a process pool, preserving input order.

    Args:
        messages: list of request dicts, each accepted by :func:`gpt`.

    Returns:
        list[str]: one response per input message, in the same order.
    """
    # os.cpu_count() may return None on exotic platforms; fall back to 1
    # so min() does not raise. Cap at 32 workers to bound memory use.
    workers = min(os.cpu_count() or 1, 32)
    # Context manager guarantees the pool is terminated even if map() raises
    # (the original leaked worker processes on error).
    with Pool(processes=workers) as pool:
        results = pool.map(gpt, messages)
    return results


def main(path, save_dir, limit=1500):
    """Run batched GPT inference over a jsonl file, resuming prior progress.

    Args:
        path: input jsonl file, one request dict per line.
        save_dir: output directory; the output file keeps the input basename.
        limit: process at most this many input lines (default 1500 preserves
            the previously hard-coded cap).

    Side effects:
        Appends ``{**message, "response": ...}`` jsonl lines to the output
        file one batch at a time, so an interrupted run can be resumed.
    """
    basename = os.path.basename(path)
    save_path = os.path.join(save_dir, basename)

    # Resume support: lines already written to the output are skipped.
    exist_nums = 0
    if os.path.exists(save_path):
        with open(save_path, 'r', encoding='utf-8') as f:
            exist_nums = sum(1 for _ in f)

    with open(path, 'r', encoding='utf-8') as f:
        messages = [json.loads(line) for line in f]

    messages = messages[exist_nums:limit]
    group = 64  # requests per multiprocessing round
    batch_messages = [messages[i:i+group] for i in range(0, len(messages), group)]

    for sub_messages in tqdm(batch_messages, desc=f'{path}|{group}个一组infer'):
        responses = multiprocessing_gpt(sub_messages)
        # Flush each finished batch immediately so progress survives crashes.
        with open(save_path, 'a+', encoding='utf-8') as f:
            for message, response in zip(sub_messages, responses):
                json.dump({**message, "response": response}, f, ensure_ascii=False)
                f.write('\n')

def eval(paths, badcase_path=None):
    """Score GPT answers against ground truth and collect mismatches.

    NOTE: the name shadows the builtin ``eval``; kept for compatibility
    with existing callers. The original signature ``badcase_path:None``
    was an annotation, not a default, so calling with one argument raised
    TypeError -- fixed to a real ``=None`` default.

    Args:
        paths: jsonl files produced by :func:`main`. Each line must carry
            a ``response`` (JSON string with an ``answer`` field) and a
            ``gt_trans`` list.
        badcase_path: optional file; when given, mismatched lines plus a
            trailing summary line are written there.

    Returns:
        tuple: ``(TP, total, parse_failed)``; the summary is also printed.
    """
    TP = 0
    total = 0
    parse_failed = 0
    # Maps the answer letter found in the model reply to the expected
    # translation count: D means "no translation" (0).
    std_answers = {'A': 1,
                   'B': 2,
                   'C': 3,
                   'D': 0}
    badcase = []
    for path in paths:
        with open(path, 'r', encoding='utf-8') as f:
            for raw in f:
                line = json.loads(raw)
                try:
                    response = json.loads(line['response'])
                    answer = response['answer']
                    for sa, idx in std_answers.items():
                        if sa in answer:
                            answer = idx
                            break
                except (KeyError, TypeError, ValueError):
                    # Empty/invalid JSON, missing 'answer', or non-string answer.
                    parse_failed += 1
                    continue
                if not isinstance(answer, int):
                    # Reply contained none of A-D; the original crashed below
                    # comparing a str to int. Count it as a parse failure.
                    parse_failed += 1
                    continue
                total += 1
                gt_n = len(line['gt_trans'])
                # Correct when the predicted count is within 1..len(gt_trans),
                # or both sides agree there is no translation (0).
                if (gt_n >= answer > 0) or (gt_n == 0 and answer == 0):
                    TP += 1
                else:
                    line['gt_and_gpt'] = (line['gt_trans'], answer)
                    badcase.append({k: v for k, v in line.items()
                                    if k in ['pre', 'suf', 'file', 'response', 'gt_and_gpt']})

    # Guard against ZeroDivisionError when every line failed to parse.
    acc = round(TP / total, 4) if total else 0.0
    summary = f'TP:{TP} | total: {total} | acc:{acc} | parse_failed:{parse_failed}'

    if badcase_path:
        with open(badcase_path, 'w+', encoding='utf-8') as f:
            for b in badcase:
                json.dump(b, f, ensure_ascii=False)
                f.write('\n')
            f.write(summary)

    print(summary)
    return TP, total, parse_failed

if __name__ == '__main__':
    # Input jsonl directories and the mirrored output directories (paired
    # by position; a previous "human" subset run is handled the same way
    # by swapping these lists).
    dirs = [
        '/mnt/public/linzhixin/data/sc-filter/1209/web',
        '/mnt/public/linzhixin/data/sc-filter/1209/pdf',
    ]
    save_dirs = [
        '/mnt/public/linzhixin/data/sc-filter-gpt/1209/web',
        '/mnt/public/linzhixin/data/sc-filter-gpt/1209/pdf',
    ]

    for save_dir in save_dirs:
        # exist_ok makes this idempotent; replaces the original bare
        # try/except around a side-effect list comprehension.
        os.makedirs(save_dir, exist_ok=True)

    for src_dir, save_dir in zip(dirs, save_dirs):
        # sorted() makes the processing order deterministic across runs,
        # which matters because main() resumes from line counts.
        for name in sorted(os.listdir(src_dir)):
            path = os.path.join(src_dir, name)
            print(path)
            main(path, save_dir)
