import json
import os
import pickle
from datasets import load_dataset
from transformers import AutoTokenizer
import jsonlines

from .const_utils import *
from .func_utils import load_pickle, save_pickle


def _get_dataset_size(data_type, extra=False):
    """Return the configured sample count for *data_type*.

    When *extra* is true and *data_type* is DATA_EN, the larger
    EXTRA_EN_SIZE budget is returned instead of EN_SIZE.  Unknown
    types yield None; callers use the result as a slice bound, so
    None effectively means "take everything".
    """
    if data_type == DATA_EN:
        return EXTRA_EN_SIZE if extra else EN_SIZE
    # Fixed-size datasets; sizes come from const_utils.
    sizes = {
        DATA_REALNEWS: REALNEWS_SIZE,
        DATA_LFQA: LFQA_SIZE,
    }
    return sizes.get(data_type)

def load_prompt_dataset(data_type, extra_data=False):
    """Load the cached prompt dataset for *data_type*, trimmed to size.

    Parameters
    ----------
    data_type : str
        One of the DATA_* constants; selects ``dataset/<data_type>/prompt.json``.
    extra_data : bool
        Forwarded to ``_get_dataset_size`` so the extended EN budget
        (EXTRA_EN_SIZE) can take effect.

    Returns
    -------
    list
        The first N prompts, where N comes from ``_get_dataset_size``
        (N may be None, meaning the full dataset).
    """
    cache_path = f'dataset/{data_type}/prompt.json'
    with open(cache_path, "r") as f:
        dataset = json.load(f)
    # BUG FIX: extra_data was accepted but never forwarded, so the
    # extended EN size could never be selected through this function.
    size = _get_dataset_size(data_type, extra_data)
    print("load prompt dataset from", data_type, size)
    return dataset[:size]

def read_json(filepath):
    """Deserialize and return the JSON document stored at *filepath*."""
    with open(filepath, "r") as handle:
        raw = handle.read()
    return json.loads(raw)

def write_json(data, filepath):
    """Serialize *data* to *filepath* as indented JSON, keeping non-ASCII text."""
    serialized = json.dumps(data, indent=2, ensure_ascii=False)
    with open(filepath, "w") as sink:
        sink.write(serialized)

def read_jsonl(filepath):
    """Parse a JSON-Lines file (one JSON document per line).

    Blank lines are skipped.  Returns a list with one deserialized
    object per non-empty line, in file order.
    """
    data = []
    with open(filepath, "r") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            # BUG FIX: json.load expects a file object; json.loads
            # parses a string.  The original raised on every line.
            data.append(json.loads(line))
    return data


def write_jsonl(outpath, data):
    """Write each item of *data* as one JSON line to *outpath*."""
    with jsonlines.open(outpath, mode="w") as sink:
        # write_all emits one line per element, same as a manual loop.
        sink.write_all(data)
        print("write to "+outpath)


if __name__ == '__main__':
    # Smoke test: load the default (non-extra) English prompt dataset.
    dataset = load_prompt_dataset(data_type=DATA_EN)