
from my_py_toolkit.file.file_toolkit import *
from my_py_toolkit.data_clean.remove_duplicates_files import remove_duplicates_files
from collections import Counter
from shutil import move
from tqdm import tqdm
import traceback

def remove_empty_value_in_dict(data):
    """Return a copy of *data* without falsy values ('' / None / [] / 0 / {})."""
    filtered = {}
    for key, value in data.items():
        if value:
            filtered[key] = value
    return filtered

def handle_label_data4compare(label_data):
    """Prepare labelled data for comparison.

    For every key in *label_data*, keep only the records whose '年龄' (age)
    field is truthy, and strip falsy values from each kept record.
    NOTE(review): records missing the '年龄' key will raise KeyError —
    presumably every record has it; confirm against the label files.
    """
    cleaned = {}
    for key, records in label_data.items():
        kept = []
        for record in records:
            if record['年龄']:
                kept.append(remove_empty_value_in_dict(record))
        cleaned[key] = kept
    return cleaned

def read_labels(label_dirs):
    """Collect label annotations from the given directories.

    Only files ending in ``_value.json`` are read. The integer prefix of the
    file name (text before the first ``_``) is used as the record id.
    Returns a dict mapping id -> cleaned label data serialized as a JSON string.
    """
    labels = {}
    for label_dir in label_dirs:
        for path in get_file_paths(label_dir):
            if not path.endswith('_value.json'):
                continue
            file_name = get_file_name(path)
            record_id = int(file_name.split('_')[0])
            cleaned = handle_label_data4compare(readjson(path))
            labels[record_id] = json.dumps(cleaned, ensure_ascii=False)
    return labels

def get_id(file):
    """Extract the integer id prefix from *file*'s name (text before the first '_').

    Returns:
        The parsed id, or -1 (with the traceback printed) when parsing fails.
    """
    try:
        fn = get_file_name(file)
        return int(fn.split('_')[0])
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; parse failures still fall back to -1.
        print(f'Get id failed : {file}, {traceback.format_exc()}')
        return -1
    
def dict2str(data):
    """Render *data* as ``key: value`` lines, one per entry, each ending in '\\n'.

    Uses ``str.join`` instead of repeated ``+=`` so the work is linear in the
    total output size rather than quadratic.
    """
    return ''.join(f'{k}: {v}\n' for k, v in data.items())

def handle_desc(data):
    """Build a plain-text description from a drug record's ``说明书``
    (instruction leaflet) section, keeping only the QC-relevant fields,
    and render it via ``dict2str``.
    """
    # NOTE(review): '注意事項' uses a traditional character (項) — kept
    # byte-for-byte since it must match the keys in the source data.
    wanted = ('适应症', '老人用药', '儿童用药', '禁忌', '用法用量', '注意事項')
    selected = {k: v for k, v in data.get('说明书', {}).items() if k in wanted}
    return dict2str(selected)

if __name__ == '__main__':
    # Inputs: de-duplicated drug data plus human-labelled positive samples.
    data_path = r'E:\gitee\test_code2\project\video_wenzhen\yaowuzhikong\datas\网院开过的药品数据_no_dup.json'
    ori_dirs = [r'E:\Work\项目\视频问诊\药方质控\已标注数据\正样本']
    save_dir = r'E:\Work\项目\视频问诊\药方质控\已标注数据\去重\正样本_desc_dup'

    os.makedirs(save_dir, exist_ok=True)

    # Index drug records by integer id, then keep only the ones that have labels.
    datas = {int(record['id']): record for record in readjson(data_path)}
    labels = read_labels(ori_dirs)
    datas_label = {record_id: datas[record_id] for record_id in labels}

    # De-duplicate the labelled files by their description text.
    # NOTE(review): the trailing 10 is an opaque positional argument —
    # confirm its meaning against remove_duplicates_files' signature.
    remove_duplicates_files(datas_label, handle_desc, get_id, ori_dirs, save_dir, 10)