# -*- coding: utf-8 -*-
# @Time : 2023/10/9 10:46
# @Author : momo
# @File : data_helper
# @Project : bert-textcnn

# Locations of the pre-trained BERT checkpoint files
# (Chinese BERT-wwm: 12 layers, 768 hidden units, 12 attention heads).
bert_config_path = './chinese_bert_wwm_L-12_H-768_A-12/bert_config.json'
# NOTE(review): added the leading './' for consistency with the other two
# paths; './x' and 'x' resolve to the same relative location.
bert_checkpoint_path = './chinese_bert_wwm_L-12_H-768_A-12/bert_model.ckpt'
bert_dict_path = './chinese_bert_wwm_L-12_H-768_A-12/vocab.txt'

# Data, model, and output paths.
ori_demo_path = './data/demo.xlsx'
# Raw Excel inputs; filenames translate to "company-to-patent mapping" and
# "semiconductor industry chain v1" — keep the on-disk names exactly as-is.
ori_zl_path = './data/企业对应专利 2023-4-12.xlsx'
ori_node_path = './data/半导体产业链v1.xlsx'

# Saved model artifacts (Keras format).
model_path = './models/ha_bert_tcnn_.keras'
k_model_path = './models/'
bert_model_path = './models/bert_.keras'
# model_path = './models/bak/bert_tcnn_l1_best.keras'
# mlb_path = './data/mlb.pkl'
mlb_dir = './data/mlb'  # MultiLabelBinarizer artifacts — presumed; confirm against training code
logs_dir = './logs/'
res_dir = './res/'
tb_logs_dir = './tblogs'  # TensorBoard logs — presumed from the name; verify

# NOTE(review): name suggests an over-sampling/augmentation multiplier,
# but its use is not visible in this file — confirm against the caller.
stronger_multiple = 10

# Training hyper-parameters.
epochs = 100
batch_size = 32
# Maximum token sequence length (the original marked this with "ha",
# presumably a model-variant tag — verify).
max_len = 256

learning_rate = 5e-4
drop_rate = 0.3  # dropout rate
l2_reg_lambda = 1e-5  # L2 regularization coefficient
fc_size = 200  # fully-connected layer width — presumed; confirm in model code
attention_unit_size = 256

# NOTE(review): weighting factor of unknown purpose here (loss mixing?) —
# confirm where alpha is consumed.
alpha = 0.5

# K-fold cross-validation settings.
num_folds = 10
random_seed = 42