import pickle

from transformers import BertTokenizer


# Quick demo: extend a list with a trailing zero via unpacking,
# leaving the original list `a` untouched.
a = [1, 2, 3, 4, 5, 6, 7, 8, 9]
b = [*a, 0]
print(b)


# tokenizer = BertTokenizer.from_pretrained("/home/livosr/bert-base-cased")
#
# def conll_loading(args, pad_token=0, pad_token_segment_id=0):
#     with open("train_fg_1_pg_1.pth", "rb") as f:
#         inputs_train_dict, y_train_dict = pickle.load(f)
#     features = []
#     sequence_a_segment_id = 0
#     max_seq_length=128
#     for key in ['0_0', '1_1', '2_2', '3_3']:
#
#         inputs = inputs_train_dict[key]
#         ys = y_train_dict[key]
#
#         for input_ids, label_ids in zip(inputs, ys):
#             segment_ids = [sequence_a_segment_id] * len(input_ids)
#             input_mask = [1] * len(input_ids)
#
#             padding_length = max_seq_length - len(input_ids)
#
#             input_ids += ([pad_token] * padding_length)
#             input_mask += ([0] * padding_length)
#             segment_ids += ([pad_token_segment_id] * padding_length)
#             label_ids += ([-100] * padding_length)
#
#             assert len(input_ids) == max_seq_length
#             assert len(input_mask) == max_seq_length
#             assert len(segment_ids) == max_seq_length
#             assert len(label_ids) == max_seq_length, print(len(label_ids), max_seq_length)
#             print(input_ids)
#             print(label_ids)
#             print(segment_ids)
#             print(input_mask)
#             exit(0)
#
#
#             features.append(
#                 InputFeatures(input_ids=input_ids,
#                               input_mask=input_mask,
#                               segment_ids=segment_ids,
#                               label_ids=label_ids))
#
# conll_loading(pad_token=tokenizer.pad_token_id, pad_token_segment_id=0)
