"""
解析 resource/data/json/context.jsonl文件 , 生成切词后的数据
"""
from config.Config import BASE_DIR, EOS_TOKEN, BOS_TOKEN
from cio.jsonl import JsonLReader, JsonLWriter
from utils.parallel import parallel_map
from utils.splitter import code_split, nl_split
from tqdm import tqdm


def item2split(item: dict) -> dict:
    """Tokenize one raw record into token sequences.

    Produces BOS/EOS-wrapped token lists for the method body, the natural
    language summary, and a tagged context built from the package name,
    class signature, called statements (<ds>) and callee signatures (<es>).
    Returns a dict with keys: id, method, summary, context.
    """
    def bracket(tokens: list) -> list:
        # Wrap a token list with the sentence-boundary markers.
        return [BOS_TOKEN] + tokens + [EOS_TOKEN]

    # Header part of the context: package name and class signature, each
    # delimited by its own tag pair.
    header = (
        ['<pn>'] + code_split(item['package_name']) + ['</pn>']
        + ['<cs>'] + code_split(item['class_signature']) + ['</cs>']
    )
    called = [
        tok
        for stmt in item['called_statements']
        for tok in ['<ds>'] + code_split(stmt) + ['</ds>']
    ]
    callees = [
        tok
        for sig in item['callee_signatures']
        for tok in ['<es>'] + code_split(sig) + ['</es>']
    ]
    return {
        "id": item["id"],
        "method": bracket(code_split(item['method'])),
        "summary": bracket(nl_split(item['summary'])),
        "context": bracket(header) + called + callees,
    }


if __name__ == '__main__':
    # The train and test splits go through identical read/tokenize/write
    # pipelines, so drive both from one loop instead of duplicating the code.
    consum_dir = BASE_DIR / 'resource' / 'data' / 'consum'
    for split in ('train', 'test'):
        src_file = consum_dir / f'context.{split}.jsonl'
        tgt_file = consum_dir / f'split_context.{split}.jsonl'
        writer = JsonLWriter(tgt_file)
        # yield_read streams records one at a time; tqdm (already imported
        # but previously unused) shows progress over the stream.
        for record in tqdm(JsonLReader(src_file).yield_read(), desc=split):
            writer.save_one(item2split(record))
