from nlhappy.data import Doc, DocBin, DatasetDict
import pandas as pd
from tqdm import tqdm


# Load the raw CMeEE-V2 train/dev splits. Each JSON record is expected to
# carry at least `text` and `entities` columns (consumed by convert_to_docbin
# below via row.text / row.entities).
train_df = pd.read_json('assets/CMeEE-V2/CMeEE-V2_train.json')
val_df = pd.read_json('assets/CMeEE-V2/CMeEE-V2_dev.json')

def convert_to_docbin(df: pd.DataFrame) -> "tuple[DocBin, list]":
    """Convert a CMeEE-style dataframe into a DocBin of entity-annotated Docs.

    Each row must expose ``text`` (the sentence) and ``entities`` (a list of
    dicts with ``start_idx``, ``end_idx`` — inclusive — plus ``entity`` and
    ``type``).

    Args:
        df: dataframe of raw CMeEE-V2 records.

    Returns:
        A ``(docbin, dirty_rows)`` tuple. Only docs that received at least one
        entity are appended to the DocBin. Rows where adding an entity failed
        AND the entity string was empty (known dirty annotations) are
        collected in ``dirty_rows`` instead of aborting.

    Raises:
        Exception: re-raises whatever ``doc.add_ent`` raised when the failing
            entity has non-empty text (i.e. an unexpected annotation error).
    """
    db = DocBin()
    dirty = []
    for row in tqdm(df.itertuples()):
        doc = Doc(text=row.text)
        for ent in row.entities:
            # end_idx is inclusive in the CMeEE annotation scheme, hence +1.
            indices = list(range(ent['start_idx'], ent['end_idx'] + 1))
            ent_text = ent['entity']
            ent_label = ent['type']
            try:
                doc.add_ent(indices=indices, label=ent_label, text=ent_text)
            except Exception as e:
                if not ent_text:
                    # Known dirty annotation (empty entity string): record the
                    # row for later inspection and keep going.
                    dirty.append(row)
                    print('found dirty')
                else:
                    # Anything else is a genuine data/logic problem — surface
                    # the offending row and fail loudly.
                    print(row)
                    raise e
        # Drop docs that ended up with no valid entities at all.
        if doc.ents:
            db.append(doc=doc)
    return db, dirty

# Convert both splits; dirty rows (empty-text entity annotations that failed
# to add) are kept alongside for manual inspection.
train_db, train_dirty = convert_to_docbin(train_df)
val_db, val_dirty = convert_to_docbin(val_df)

# Materialize NER datasets. piece_max_length=500 presumably caps the length of
# each text piece and only_have_ent=True drops pieces without entities — TODO
# confirm exact semantics against nlhappy's DocBin.to_ner_dataset.
train_ds = train_db.to_ner_dataset(piece_max_length=500, only_have_ent=True)
val_ds = val_db.to_ner_dataset(piece_max_length=500, only_have_ent=True)
# Bundle the splits and persist to disk for downstream training.
ds = DatasetDict({'train': train_ds, 'validation': val_ds})
ds.save_to_disk('./datasets/CMeEE-V2')