import json
import os
import ast
import re
import numpy as np
from utlis import gen_str_codes, clean_hhf



def fuse_kg_group_contents(kg_group, key_):
    """Concatenate the sentence fragments stored under ``key_`` into one string.

    :param kg_group: dict whose ``key_`` entry is either a list of
        single-entry dicts ({num_id: sentence}) or already a plain string.
    :param key_: the key to read from ``kg_group``.
    :return: the fused paragraph string ('' when the list is empty);
        when the value is not a list it is returned as-is.
    """
    sentences_ = kg_group[key_]
    if isinstance(sentences_, list):
        # join at C speed instead of quadratic '+=' accumulation;
        # an empty list naturally yields ''
        return ''.join(
            fragment
            for sen in sentences_
            for fragment in sen.values()
        )
    return sentences_


def process_full_contents(content, inner_key, split_char='-->', para_seg='。'): # the 'para_seg' can be enhanced
    '''
        Split a text into sentence-level knowledge pieces that comply with the
        pre-defined JSON structure: {inner_key: [{know_id: [{num: sentence}, ...]}]}.

        :param content: raw text; only the part after the last split_char is kept
        :param inner_key: key under which the knowledge group is stored; also
            mixed into the hash that forms know_id
        :param split_char: separator marking the payload inside content
        :param para_seg: sentence delimiter (Chinese full stop by default)
        :return: (kg_structure dict, know_id string)
    '''
    kg_structure = {}
    content_lst = []
    plain_sens = []
    num_id = 1

    # each 'key' corresponds to several knowledge groups, each group to several paras
    content = content.split(split_char)[-1]
    paras = [p for p in content.split(para_seg) if p]

    for i, sentence in enumerate(paras):
        if not sentence.strip():
            continue
        # newlines become the sentinel token before clean-up
        sentence = sentence.replace('\n', '__HHF__')
        # table/image placeholders keep their bare form; prose gets the
        # delimiter restored
        if not sentence.endswith(('_TABLE', '_IMAGE')):
            sentence += para_seg
        # mark the end of the whole block on the final prose sentence
        if i == len(paras) - 1 and not sentence.endswith(('_TABLE', '_IMAGE')):
            sentence += '__HHF__'

        sentence = clean_hhf(sentence)
        content_lst.append({str(num_id): sentence})
        plain_sens.append(sentence)
        num_id += 1

    # deterministic id derived from the full cleaned text plus the key
    know_id = gen_str_codes('\n'.join(plain_sens) + inner_key)
    kg_structure.update({inner_key: [{know_id: content_lst}]})
    return kg_structure, know_id


def append_to_key(json_data, target_key, new_data):
    """Recursively find ``target_key`` (whose value is a list) anywhere in a
    nested dict/list structure and extend it with ``new_data[target_key]``.

    :param json_data: nested dict/list structure to search (mutated in place)
    :param target_key: key whose list value should receive the new items
    :param new_data: dict carrying the items to append under ``target_key``
    :return: True after the first successful append, False if no match found
    """
    if isinstance(json_data, dict):
        if target_key in json_data and isinstance(json_data[target_key], list):
            # Append the content from new_data under the key target_key
            json_data[target_key].extend(new_data[target_key])
            return True
        # Key not at this level: search recursively in nested values.
        # BUGFIX: recursive calls previously referenced the undefined name
        # 'append_to_target_key', raising NameError on any nested search.
        for value in json_data.values():
            if append_to_key(value, target_key, new_data):
                return True
    elif isinstance(json_data, list):
        for item in json_data:
            if append_to_key(item, target_key, new_data):
                return True
    return False


def inject_knowledge(kg_structure, target_key, knowledge_path, USER_SETTINGS):
    """Persist ``kg_structure`` to the temporary inject file and, when a
    knowledge file already exists, merge the new structure into it in memory.

    NOTE(review): the merged ``exist_dic`` is never written back to
    ``knowledge_path`` — only ``kg_structure`` is persisted (to the inject
    path). Confirm this is intentional.

    :return: the fixed inject file name and its full path.
    """
    inject_path = USER_SETTINGS['TEMP_INJECT_PATH']

    if not os.path.exists(knowledge_path):
        # first-time use: make sure the parent directory is there
        os.makedirs(os.path.dirname(knowledge_path), exist_ok=True)
    else:
        with open(knowledge_path, 'r', encoding='utf-8') as handle:
            exist_dic = json.load(handle)
        append_to_key(exist_dic, target_key, kg_structure)

    with open(inject_path, 'w', encoding='utf-8') as handle:
        json.dump(kg_structure, handle, ensure_ascii=False)
    return 'inject_know.json', inject_path
            

def parse_json_paths(data, current_path=None, stop_key=None):
    """
    Flatten a JSON-like structure into path strings 'key-->subkey: value'.

    List elements contribute their index (as a string) to the path. When a
    dict key equals ``stop_key``, its whole value is rendered in place and
    recursion does not descend into it.

    :param data: current level to process (dict, list, or scalar)
    :param current_path: path components accumulated so far (recursion state)
    :param stop_key: dict key at which to stop descending
    :return: list of path strings for every reachable leaf
    """
    prefix = list(current_path) if current_path else []
    paths = []

    if isinstance(data, dict):
        entries = data.items()
        from_list = False
    elif isinstance(data, list):
        entries = enumerate(data)
        from_list = True
    else:
        # scalars at the top level produce no paths
        return paths

    for key, value in entries:
        step = prefix + [str(key) if from_list else key]
        stop_here = (not from_list) and key == stop_key
        if stop_here or not isinstance(value, (dict, list)):
            paths.append('-->'.join(step) + f": {value}")
        else:
            paths.extend(parse_json_paths(value, step, stop_key))
    return paths


def parse_know_item(json_path, know_prefix='content:'):
    """Extract the bottom-level (key, {key: value}) pair from a path string
    produced by parse_json_paths.

    When the item starts with ``know_prefix`` and its payload looks like a
    Python literal ({, [, ', "), the payload is parsed with ast.literal_eval;
    in every other case the raw text after the last ':' is used.

    :param json_path: full '-->'-joined path string ending in 'key: value'
    :param know_prefix: prefix marking a knowledge-content item
    :return: (key, {key: parsed_or_raw_value})
    """
    know_item = json_path.split('-->')[-1]
    key_ = know_item.split(':')[0].strip()

    if know_item.startswith(know_prefix):
        content_str = know_item[len(know_prefix):].strip()
        if content_str.startswith(("{", "[", "'", '"')):
            try:
                return key_, {key_: ast.literal_eval(content_str)}
            except (SyntaxError, ValueError):
                # malformed literal: fall through to the raw-text fallback.
                # BUGFIX: previously this path fell off the end of the
                # function and implicitly returned None.
                pass

    # fallback: raw text after the last ':'
    val_ = know_item.split(':')[-1].strip()
    return key_, {key_: val_}


def parse_parent_node(know_path, target):
    """Return the nearest non-numeric path component that precedes the LAST
    component containing ``target`` (substring match); -1 when ``target``
    never occurs or no non-numeric ancestor exists before it."""
    parts = know_path.split('-->')

    # locate the last component containing the target substring
    last_hit = -1
    for position, token in enumerate(parts):
        if target in token:
            last_hit = position

    if last_hit >= 0:
        # walk backwards over the components before the hit,
        # skipping pure list-index components
        for candidate in reversed(parts[:last_hit]):
            if not candidate.isnumeric():
                return candidate
    return -1

    
def vectorize_know(know_target, vec_func, model, tokenizer, use_content=True):
    '''
        :function: convert a .JSON knowledge file into semantic vectors; the file
                may have any depth, with knowledge placed at the bottom
                'content' level
        :parameters:
            know_target: string, path of the .JSON file
            vec_func: callable(text, tokenizer, model) -> (text_repr, vector)
            use_content: boolean, if True use the knowledge contents to generate
                vectors; if False only the parent keys are vectorized
        :return:
            know_paths: the parent key of every vectorized item
            know_key_vecs: np.ndarray of the generated vectors
            parsed_paths: flattened path strings of the whole file
            key_: key of the last parsed item (None when the file yields no paths)
    '''
    # the 'with' block closes the file; the old explicit f.close() was redundant
    with open(know_target, encoding='utf-8', mode='r') as f:
        know_structure = json.load(f)

    # IMPORTANT, parse a .JSON file into a tree like structure
    parsed_paths = parse_json_paths(know_structure, stop_key='content')
    know_key_vecs = []
    know_paths = []
    key_ = None  # BUGFIX: previously NameError when parsed_paths was empty

    for json_path in parsed_paths:
        # retrieve bottom-level items, where the knowledge pieces lie
        key_, bottom_dic = parse_know_item(json_path)
        if key_ != 'content':
            continue
        parent_ = parse_parent_node(json_path, key_)

        # choose the text to vectorize: the fused contents when requested
        # (or forced for 'UNK' paths); fall back to the parent key when the
        # contents are empty ('' would otherwise pollute the vectors)
        text = parent_
        if use_content or 'UNK' in json_path:
            know_contents = fuse_kg_group_contents(bottom_dic, key_)
            if know_contents != '':
                text = know_contents

        _, k_vector = vec_func(text, tokenizer, model)
        know_key_vecs.append(k_vector)
        know_paths.append(parent_)

    return know_paths, np.array(know_key_vecs), parsed_paths, key_



if __name__ == "__main__":
    print()
        
    

    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    