import pypinyin
from pypinyin import pinyin
import re
import json
import os


def get_py_letters(cn_str):
    """Return the uppercase pinyin first letter of each character in *cn_str*, concatenated."""
    syllables = pinyin(cn_str, style=pypinyin.FIRST_LETTER)
    return ''.join(syl[0].upper() for syl in syllables)
    

def get_pinyin_filenames(kb_root_path):
    """Collect every ``.json`` file under *kb_root_path* and build a pinyin-initial id for each.

    Returns:
        (pinyin_filenames, all_json_files) — parallel lists: the derived
        pinyin-initial id for each discovered JSON file path.
    """
    all_json_files = []
    for root, _, files in os.walk(kb_root_path):
        for name in files:
            if name.endswith(".json"):
                all_json_files.append(os.path.join(root, name))

    pinyin_filenames = []
    for path in all_json_files:
        # Drop the root prefix, strip any leading non-CJK characters and the
        # extension, then flatten path separators into '-'.
        trimmed = path.replace(kb_root_path, "")
        trimmed = os.path.splitext(re.sub(r'^[^\u4e00-\u9fa5]+', '', trimmed))[0]
        trimmed = trimmed.replace('\\', '-').replace('/', '-')
        pinyin_filenames.append(get_py_letters(trimmed))

    return pinyin_filenames, all_json_files
    

def check_tranform_json_keys(data, py_id, upper_key=None, len_=None, level=0):
    """Recursively walk *data* (parsed JSON) and standardize every ``'id'`` value.

    Any dict whose ``'id'`` differs from *py_id* gets it rewritten to
    ``"{py_id}-{initials_of_upper_key}-G{index}"``, where the index is popped
    from *len_* (a per-list counter refreshed for each list encountered).

    Args:
        data: dict / list / scalar from ``json.load``; mutated in place.
        py_id: expected base id (pinyin initials of the source filename).
        upper_key: key under which *data* was nested, used to derive the group
            letters. NOTE(review): may still be ``None`` for a top-level
            mismatched id, which would fail inside get_py_letters — confirm
            inputs always nest mismatched ids under a named key.
        len_: remaining group indices for the enclosing list.
        level: recursion depth (only used for debug printing, kept for
            backward compatibility).

    Returns:
        *data*, mutated in place.
    """
    # BUG FIX: the original used a mutable default argument (len_=[]), which
    # is shared across calls; use the None-sentinel idiom instead.
    if len_ is None:
        len_ = []

    if isinstance(data, dict):
        for key in data:
            if key == 'id':
                old_id_val = data[key]
                if old_id_val != py_id:
                    upper_key_letters = get_py_letters(upper_key)
                    gid = str(len_.pop(0))
                    data[key] = py_id + '-' + upper_key_letters + '-G' + gid

            # Strings are leaves; anything else may nest further ids.
            if not isinstance(data[key], str):
                check_tranform_json_keys(data[key], py_id, key, len_, level + 1)

    elif isinstance(data, list):
        # Fresh counters: one group index per element of this list.
        len_ = list(range(len(data)))
        for item in data:
            check_tranform_json_keys(item, py_id, upper_key, len_, level)

    return data


def check_cls_consistency(tec_cls_path, split_char=';', kb_path=None):
    """Verify every class path listed in *tec_cls_path* exists on disk.

    Each line of the file is split on *split_char* into path components
    relative to the knowledge-base root; a line containing ``'/'`` lists
    several alternative files for one annotation.

    Args:
        tec_cls_path: path of the ontology class-list text file.
        split_char: separator between path components on each line.
        kb_path: knowledge-base root; defaults to the module-global
            ``KB_PATH`` for backward compatibility.
    """
    root = KB_PATH if kb_path is None else kb_path

    def file_test(line, inconsistent):
        # One candidate path per call; returns the updated mismatch count.
        parts = line.strip().split(split_char)
        if not all(p == '' for p in parts):
            # BUG FIX: the original joined components with a literal '\\',
            # which is not a path separator on POSIX systems — use
            # os.path.join so the check works everywhere.
            tec_path = os.path.join(root, *parts)
            # BUG FIX: existence was probed by open() inside a bare except,
            # which swallowed every error type; test explicitly instead.
            if not os.path.isfile(tec_path):
                inconsistent += 1
                print('the class between ontology and directory is NOT consistent', tec_path)
        return inconsistent

    inconsistent = 0
    with open(tec_cls_path, encoding='utf-8') as f:
        for line in f:
            if '/' in line:  # multiple files for one annotation
                for sub_line in line.split('/'):
                    inconsistent = file_test(sub_line, inconsistent)
            else:
                inconsistent = file_test(line, inconsistent)

    if inconsistent == 0:
        print('No class name errors found between the ontology-working system')


def standardize_contents(py_ids, all_json_files):
    """Rewrite each JSON file with standardized ids.

    ``py_ids[i]`` is the pinyin-initial id for ``all_json_files[i]``; each
    file is loaded, transformed via ``check_tranform_json_keys``, and written
    back in place. Processing stops at the first file that fails, so errors
    are not cascaded across the knowledge base.

    Args:
        py_ids: per-file pinyin ids, parallel to *all_json_files*.
        all_json_files: paths of the JSON files to rewrite.
    """
    # BUG FIX: the original looked each path up with list.index(), which is
    # O(n) per file and resolves to the wrong id when a path appears twice;
    # iterate the parallel lists in lockstep instead.
    for py_id, str_file in zip(py_ids, all_json_files):
        file_path = os.path.abspath(str_file)
        try:
            with open(file_path, encoding='utf-8') as fh:
                new_dic = check_tranform_json_keys(json.load(fh), py_id)

            with open(file_path, encoding='utf-8', mode='w') as fh:
                json.dump(new_dic, fh, ensure_ascii=False, indent=4)

        except Exception as e:
            # Deliberate: report and stop at the first broken file.
            print('ERROR in file: ', file_path)
            print(e)
            break


if __name__ == "__main__":
    user = 'Eric'
    KB_PATH = '../知识固化库_' + user

    tec_cls_path = os.path.join(KB_PATH, 'Tec_class.txt')
    py_ids, all_json_files = get_pinyin_filenames(KB_PATH)

    MODE = 'standard'  # one of: 'standard', 'check'

    # Dispatch on the selected mode; unknown modes are a silent no-op.
    actions = {
        'standard': lambda: standardize_contents(py_ids, all_json_files),
        'check': lambda: check_cls_consistency(tec_cls_path),
    }
    action = actions.get(MODE)
    if action is not None:
        action()
    
    

    
    

    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    