from tqdm import tqdm
import random
import pickle
def sent2char(line: str) -> list[str]:
    """Split one raw corpus line into whitespace-separated tokens.

    :param line: raw line read from the corpus file (may end with a newline)
    :return: list of tokens; empty list for a blank line
    """
    # str.split() with no separator already discards leading/trailing
    # whitespace (including the trailing '\n'), so no strip() is needed.
    return line.split()

def train_val_split(X, y, valid_size=0.2, random_state=2018, shuffle=True):
    """Split parallel sentences/labels into train and validation sets.

    :param X: sequence of sentences
    :param y: sequence of labels, parallel to ``X``
    :param valid_size: fraction of pairs placed in the validation set
    :param random_state: seed for the (local) shuffle RNG
    :param shuffle: if True, shuffle the pairs before splitting
    :return: ``(train, valid)`` — two lists of ``(sentence, label)`` tuples
    """
    # Pair up the two parallel sequences in one pass.
    data = list(zip(X, y))

    n_valid = int(len(data) * valid_size)

    if shuffle:
        # Use a private Random instance so the module-wide RNG state is
        # not clobbered; Random(seed).shuffle yields the exact same
        # permutation as random.seed(seed) + random.shuffle.
        random.Random(random_state).shuffle(data)

    valid = data[:n_valid]
    train = data[n_valid:]
    return train, valid
# ---------------------------------------------------------------------------
# Script: read the parallel source/target BIO corpus files line by line,
# split them 20% valid / 20% test / 60% train (in file order, no shuffle),
# and pickle the six resulting lists.
# ---------------------------------------------------------------------------
targets, sentences = [], []

# NOTE(review): assumes the corpus files are UTF-8 (Chinese text) — confirm.
# The original relied on the locale default encoding, which is not portable.
with open("source_BIO_2014_cropus.txt", 'r', encoding='utf-8') as fr_1, \
        open("target_BIO_2014_cropus.txt", 'r', encoding='utf-8') as fr_2:
    # zip() keeps the two files aligned line-for-line; iteration stops at
    # the shorter file if they ever disagree in length.
    for sent, label in zip(fr_1, fr_2):
        sentences.append(sent.strip('\n'))
        targets.append(label.strip('\n'))

N = len(sentences)
valid_size = 0.2
test_size = int(N * valid_size)

# Contiguous slices: first 20% -> valid, next 20% -> test, rest -> train.
valid = sentences[:test_size]
test = sentences[test_size:2 * test_size]
train = sentences[2 * test_size:]

# Labels are sliced with the same boundaries so they stay parallel.
valid_l = targets[:test_size]
test_l = targets[test_size:2 * test_size]
train_l = targets[2 * test_size:]

with open("train_sen.pkl", 'wb') as f1, open("valid_sen.pkl", 'wb') as f2, open("test_sen.pkl", 'wb') as f3:
    pickle.dump(train, f1)
    pickle.dump(valid, f2)
    pickle.dump(test, f3)
with open("train_lab.pkl", 'wb') as f1, open("valid_lab.pkl", 'wb') as f2, open("test_lab.pkl", 'wb') as f3:
    pickle.dump(train_l, f1)
    pickle.dump(valid_l, f2)
    pickle.dump(test_l, f3)

# Sample of the (chars, labels) pairs this pipeline produces, kept for
# reference when eyeballing the pickled output:
'''
[(['“', '我', '们', '的', '账', '户', '并', '没', '有', '被', '黑', ',', '这', '些', '消', '息', '都', '是', '我', '发', '布', '的', '”', ',', '苏', '娜', '达', '这', '样', '表', '示', '。', '据', '悉', ',', '此', '前', '曾', '为', '联', '合', '国', '外', '交', '官', ',', '并', '曾', '参', '与', '角', '逐', '联', '合', '国', '秘', '书', '长', '一', '职', '的', '沙', '洛', '尔', '是', '印', '度', '政', '府', '中', '的', '“', '推', '特', '”', '活', '跃', '用', '户', '。'], ['O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'B_PER', 'I_PER', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'B_T', 'I_T', 'O', 'O', 'B_ORG', 'I_ORG', 'I_ORG', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'B_ORG', 'I_ORG', 'I_ORG', 'O', 'O', 'O', 'O', 'O', 'O', 'B_PER', 'I_PER', 'I_PER', 'O', 'B_LOC', 'I_LOC', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O']), (['有', '意', '为', '难'], ['O', 'O', 'O', 'O'])]
[(['图', '片', '来', '源', ':', '人', '民', '视', '觉'], ['O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O']), (['罗', '某', ',', '一', '名', '父', '亲', ',', '4', '0', '多', '岁', ';', '小', '晴', ',', '他', '的', '女', '儿', ',', '1', '1', '岁', ';', '罗', '某', '的', '妻', '子', ',', '在', '武', '平', '某', '单', '位', '上', '班', '。', '一', '家', '三', '口', ',', '住', '在', '龙', '岩', '武', '平', '县', '城', '的', '一', '栋', '自', '建', '房', '内', '。'], ['O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'B_PER', 'I_PER', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'B_LOC', 'I_LOC', 'I_PER', 'I_PER', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O'])]
'''
