import os
import sys
# Select the interpreter: the pinned conda-env binary on the Linux training
# box, plain `python` from PATH when running on Windows.
plat = sys.platform
pythons = ['/nlpdata/zhaoang/miniconda3/envs/tf1.15/bin/python', 'python']
python = pythons[0] if plat != 'win32' else pythons[1]

def cmd_train_teacher():
    """Fine-tune the teacher (Chinese ELECTRA-base discriminator) on the
    MSRA data with ``torch.distributed.launch`` across ``ngpu`` GPUs.

    Builds the launch command from an option list (instead of a
    backslash-continued f-string whose pretty-printing relied on a fragile
    ``replace(' '*9, '\\n')`` hack), prints it one option per line, and
    runs it through ``os.system``.

    Returns:
        int: the ``os.system`` exit status (previously discarded).
    """
    ELECTRA_DIR_BASE = './electra_res/chinese_electra_base_discriminator_pytorch'
    OUTPUT_DIR = './DistOut'
    DATA_DIR = './MSRA'

    # Training hyperparameters.
    ngpu = 2
    lr = 1e-4
    batch_size = 24
    length = 196   # max sequence length in tokens
    ep = 30        # number of training epochs

    opts = [
        f'{python} -m torch.distributed.launch',
        f'--nproc_per_node={ngpu} main.train.dist.py',
        f'--vocab_file {ELECTRA_DIR_BASE}/vocab.txt',
        '--bert_config_file_T none',
        f'--bert_config_file_S {ELECTRA_DIR_BASE}/config.json',
        f'--init_checkpoint_S {OUTPUT_DIR}/pytorch_model_teacher_5303.bin',
        '--do_train',
        '--do_eval',
        '--do_predict',
        f'--max_seq_length {length}',
        f'--train_batch_size {batch_size}',
        '--random_seed 1337',
        f'--train_file {DATA_DIR}/train_pos_msra.txt',
        f'--predict_file {DATA_DIR}/dev_pos_msra.txt',
        f'--num_train_epochs {ep}',
        f'--learning_rate {lr}',
        '--ckpt_frequency 3',
        '--official_schedule linear',
        f'--output_dir {OUTPUT_DIR}',
        '--gradient_accumulation_steps 1',
        '--output_encoded_layers true',
        '--output_attention_layers false',
        '--lr_decay 0.8',
        '--txtmode 0',
    ]
    print('\n'.join(opts))            # readable: one option per line
    return os.system(' '.join(opts))  # propagate the shell exit status

def cmd_plain_teacher_predict(cuda=0, bpos=0):
    """Launch a background (``nohup ... &``) directory-prediction job with
    the fine-tuned teacher checkpoint on the plain-text corpus slices.

    Args:
        cuda: value for ``CUDA_VISIBLE_DEVICES`` (which GPU to use).
        bpos: starting block offset within the corpus; also used to name
            the per-job log file ``_nohup.out.{bpos}``.

    Returns:
        int: the ``os.system`` exit status of the launching shell
        (previously discarded; the job itself runs in the background).
    """
    ELECTRA_DIR_BASE = './electra_res/chinese_electra_base_discriminator_pytorch'
    OUTPUT_DIR = './DistOut'
    # Two paths: input slices dir and output segmentation dir, passed as
    # two whitespace-separated arguments to --file.
    NLPDATA = '/audiodata6/zhaoang/clean_text_slices /audiodata6/zhaoang/clean_text_seg'
    ngpu = 1
    length = 196   # max sequence length in tokens

    opts = [
        f'CUDA_VISIBLE_DEVICES={cuda} nohup',
        f'{python} -u -m torch.distributed.launch',
        f'--nproc_per_node={ngpu} main.train.dist.py',
        f'--vocab_file {ELECTRA_DIR_BASE}/vocab.txt',
        '--bert_config_file_T none',
        f'--bert_config_file_S {ELECTRA_DIR_BASE}/config.json',
        '--predict_batch_size 64',
        f'--init_checkpoint_S {OUTPUT_DIR}/pytorch_model_teacher_5303.bin',
        '--do_dir_predict',
        f'--max_seq_length {length}',
        '--random_seed 1337',
        f'--output_dir {OUTPUT_DIR}/',
        f'--file {NLPDATA}',
        '--txtmode 1',
        f'--bpos {bpos}',
        '--local_rank -1',
        f'> _nohup.out.{bpos} 2>&1 &',  # detach, log per-bpos
    ]
    print('\n'.join(opts))            # readable: one option per line
    return os.system(' '.join(opts))  # propagate the shell exit status

def cmd_plain_student_predict(cuda=-1, bpos=0):
    """Launch a background (``nohup ... &``) directory-prediction job with
    the distilled student (ELECTRA-small) checkpoint on the plain-text
    corpus slices.

    Args:
        cuda: value for ``CUDA_VISIBLE_DEVICES``; the default ``-1``
            hides all GPUs (CPU run) — presumably intentional, confirm.
        bpos: starting block offset within the corpus; also names the
            per-job log file ``_nohup.out.{bpos}``.

    Returns:
        int: the ``os.system`` exit status of the launching shell
        (previously discarded; the job itself runs in the background).
    """
    ELECTRA_DIR_BASE = './electra_res/chinese_electra_small_discriminator_pytorch'
    OUTPUT_DIR = './DistOut'
    # Two paths: input slices dir and output segmentation dir, passed as
    # two whitespace-separated arguments to --file.
    NLPDATA = '/audiodata6/zhaoang/text_slices /audiodata6/zhaoang/text_seg'
    ngpu = 1
    length = 196   # max sequence length in tokens

    opts = [
        f'CUDA_VISIBLE_DEVICES={cuda} nohup',
        f'{python} -u -m torch.distributed.launch',
        f'--nproc_per_node={ngpu} main.train.dist.py',
        f'--vocab_file {ELECTRA_DIR_BASE}/vocab.txt',
        '--bert_config_file_T none',
        f'--bert_config_file_S {ELECTRA_DIR_BASE}/config.json',
        '--predict_batch_size 1',
        f'--init_checkpoint_S {OUTPUT_DIR}/pytorch_model_student_89110.bin',
        '--do_dir_predict',
        f'--max_seq_length {length}',
        '--random_seed 1337',
        f'--output_dir {OUTPUT_DIR}/',
        f'--file {NLPDATA}',
        '--txtmode 1',
        f'--bpos {bpos}',
        '--local_rank -1',
        f'> _nohup.out.{bpos} 2>&1 &',  # detach, log per-bpos
    ]
    print('\n'.join(opts))            # readable: one option per line
    return os.system(' '.join(opts))  # propagate the shell exit status

def loop_cmd_plain_teacher_predict():
    """Fan out six teacher-prediction jobs, alternating between GPU 0 and
    GPU 1, each starting at a block offset 50 apart."""
    n_jobs = 6
    for job in range(n_jobs):
        cmd_plain_teacher_predict(cuda=job % 2, bpos=job * 50)

def cmd_train_student():
    """Distill the fine-tuned ELECTRA-base teacher into the ELECTRA-small
    student on the MSRA data with ``torch.distributed.launch``.

    Builds the launch command from an option list (instead of a
    backslash-continued f-string whose pretty-printing relied on a fragile
    ``replace(' '*9, '\\n')`` hack), prints it one option per line, and
    runs it through ``os.system``.

    Returns:
        int: the ``os.system`` exit status (previously discarded).
    """
    ELECTRA_DIR_BASE = './electra_res/chinese_electra_base_discriminator_pytorch'
    ELECTRA_DIR_SMALL = './electra_res/chinese_electra_small_discriminator_pytorch'
    OUTPUT_DIR = './DistOut'
    DATA_DIR = './MSRA'
    student_config_file = f'{ELECTRA_DIR_SMALL}/config.json'
    trained_teacher_model_file = f'{OUTPUT_DIR}/pytorch_model_teacher_5303.bin'

    # Distillation hyperparameters.
    ngpu = 2
    lr = 1e-4
    temperature = 8   # distillation softmax temperature
    batch_size = 24
    length = 196      # max sequence length in tokens
    ep = 50           # number of training epochs

    opts = [
        f'{python} -m torch.distributed.launch',
        f'--nproc_per_node={ngpu} main.distill.dist.py',
        f'--vocab_file {ELECTRA_DIR_BASE}/vocab.txt',
        f'--bert_config_file_T {ELECTRA_DIR_BASE}/config.json',
        f'--tuned_checkpoint_T {trained_teacher_model_file}',
        f'--bert_config_file_S {student_config_file}',
        f'--init_checkpoint_S {OUTPUT_DIR}/pytorch_model_student_42963.bin',
        '--do_train',
        '--do_eval',
        '--do_predict',
        f'--max_seq_length {length}',
        f'--train_batch_size {batch_size}',
        '--random_seed 1337',
        f'--train_file {DATA_DIR}/train_pos_msra.txt',
        f'--predict_file {DATA_DIR}/dev_pos_msra.txt',
        f'--num_train_epochs {ep}',
        f'--learning_rate {lr}',
        '--ckpt_frequency 3',
        '--official_schedule linear',
        f'--output_dir {OUTPUT_DIR}',
        '--gradient_accumulation_steps 1',
        f'--temperature {temperature}',
        '--output_encoded_layers true',
        '--output_attention_layers false',
        '--local_rank -1',
    ]
    print('\n'.join(opts))            # readable: one option per line
    return os.system(' '.join(opts))  # propagate the shell exit status

if __name__ == "__main__":
    #cmd_train_teacher()
    cmd_plain_student_predict()
    #loop_cmd_plain_teacher_predict()
    # from ltp import LTP
    
