# -*- coding: utf-8 -*-
# Program function: shared configuration — data paths, label maps, and model hyperparameters.
import torch
from transformers import BertModel,BertTokenizer,BertConfig

# Dataset split locations (Windows-style relative paths).
train_path = r'..\dm00_data\train.txt'
dev_path = r'..\dm00_data\dev.txt'
test_path = r'..\dm00_data\test.txt'

# Label maps: class index <-> department name (one department per line).
# Fix: the original `open()` never closed its file handle; use a context
# manager instead, and pass an explicit encoding so the department names
# decode the same on every platform.
# NOTE(review): assumes departments.txt is UTF-8 — confirm; the original
# relied on the locale's default encoding.
with open(r'..\dm00_data\departments.txt', 'r', encoding='utf-8') as _dep_file:
    num2class = {i: line.strip() for i, line in enumerate(_dep_file)}
class2num = {v: i for i, v in num2class.items()}

# Checkpoint paths for the trained BERT model and its quantized variant.
bert_model_path = r'C:\Users\73974\Desktop\ai\a__xiangmu\bert\bert_model\bert_model.pt'
quantize_bert_model_path = r'C:\Users\73974\Desktop\ai\a__xiangmu\bert\bert_model\quantize_bert_model.pt'

# Hyperparameters
# BERT (teacher) model settings
# Local directory holding the pretrained bert-base-chinese checkpoint.
bert_path = r'C:\Users\73974\Desktop\ai\a__xiangmu\bert\bert-base-chinese'
bert_tokenizer = BertTokenizer.from_pretrained(bert_path)
bert_config = BertConfig.from_pretrained(bert_path)
bert_model = BertModel.from_pretrained(bert_path)

# Student (LSTM) model settings — presumably a student for distillation
# from the BERT model above; confirm against the training script.
lstm_embedding_dim = 128   # token embedding size
lstm_hidden_dim = 256      # LSTM hidden-state size
lstm_learning_rate = 1e-3  # learning rate used for the student model
lstm_num_layers = 3        # number of stacked LSTM layers
lstm_model_path = r'C:\Users\73974\Desktop\ai\a__xiangmu\bert\bert_model\lstm_model.pt'
lstm_dropout = 0.3         # dropout rate for the student model

# Shared training hyperparameters
max_len = 32     # maximum tokenized sequence length
batch_size = 64
epochs = 2
lr = 5e-5        # presumably the BERT fine-tuning learning rate — TODO confirm usage
device = 'cuda' if torch.cuda.is_available() else 'cpu'
class_num = 10   # number of target classes; should equal len(num2class) — verify
