# -*- coding: utf-8 -*-
"""
This code contains the reachability approach to the MFD.

The network traffic flow model is described using stochastic CTM. 

The stochasticity exists in the:
    
    - lane changing flow rate of the multilane road. 

yhbatch -o /BIGDATA1/zju_qhs_1/LateralSDE/tmp1.log /BIGDATA1/zju_qhs_1/LateralSDE/codes/run_FD_lanewidth.py


#============================================
容器云使用流程**********************************

一、登录容器云
1.第一种：使用ssh 登录： proxy.nscc-gz.cn，用户名：zju_qhs_1，密码：JQ7431wtymv
2.第二种通过网页登录：http://207669.proxy.nscc-gz.cn:8888/，打卡界面后输入星光的账户（qihongsheng@zju.edu.cn）和密码（JQ7431wtymv）

二、加载环境：
1.加载源：module load proxy
2.加载anaconda3： module load  anaconda3
3.加载虚拟环境：source activate virtual_env
4.如何安装第三方包可通过：conda install XXX 或 pip3 install XXX

三、修改文件：
1.第一种：可以通过命令直接修改
2.第二种：先登录星光账户（https://starlight.nscc-gz.cn/#/storage/udt），然后找到存储，找到远程服务器，选则“GPUFS”即可对文件进行修改；



#============================================
    天河安装第三方库分为两部分：

    1）不需要调用深度学习等库：

    第一步--加载源：source /BIGDATA1/app/toolshs/setproxy.sh 12.10.133.131 3128；
    第二步--加载anaconda3环境： module load anaconda3；
    第三步--通过pip install XXX 安装第三方库


    2）需要调用深度学习等库：
    第一步--加载源：source /BIGDATA1/app/toolshs/setproxy.sh 12.10.133.131 3128；
    第二步--加载anaconda3环境： module load anaconda3；
    第三步--激活虚拟环境：source activate virtual_env；
    第四步--通过conda install XXX 或者pip3 install XXX 安装第三方库


yhbatch: 

    yhbatch -o /BIGDATA1/zju_qhs_1/LateralSDE/tmp.log /BIGDATA1/zju_qhs_1/LateralSDE/codes/run_FD.py
    
    /BIGDATA1/zju_qhs_1/LateralSDE/datas/twodimFD

nohup:

    nohup python /BIGDATA1/zju_qhs_1/LateralSDE/codes/run_pso.py > /BIGDATA1/zju_qhs_1/LateralSDE/tmp.log 2>&1 &
    
    tail /BIGDATA1/zju_qhs_1/LateralSDE/tmp.log

ssh:

    ---------------------------------tianhe
    ssh -i /home/qhs/Qhs_Files/Program/Python/GraphicalSolutionArterialRoad/docs/tianhe2/许骏/zju_qhs_1.id -p 5566 zju_qhs_1@172.16.22.11

scp

    scp -i /home/qhs/Qhs_Files/Program/Python/GraphicalSolutionArterialRoad/docs/tianhe2/许骏/zju_qhs_1.id -P 5566 -r /home/qhs/Program/morphGPT/morphGPT/* zju_qhs_1@172.16.22.11:/GPUFS/zju_qhs_1/qhs/morphGPT

    ---------------------------------
    
####################################################################################
nohup: 
    
    nohup python /home/qhs/mfd_sde/codes/run_pomdp.py > /home/qhs/mfd_sde/tmp.log 2>&1 &
    
    nohup python /home/qhs/mfd_sde/codes/run_R1R3.py > /home/qhs/mfd_sde/R1R3_tmp.log 2>&1 &
    
    nohup python /home/qhs/mfd_sde/codes/run_deterministic.py > /home/qhs/mfd_sde/deterministic_tmp.log 2>&1 &
    
    #
    #nohup python /BIGDATA1/zju_qhs1/mfd_sde/codes/run_pomdp.py > /BIGDATA1/zju_qhs1/mfd_sde/tmp.log 2>&1 &
    
    
    cat /home/qhs/mfd_sde/tmp.log | grep loss
    
    cat /home/qhs/mfd_sde/R1R3_tmp.log | grep loss
    
    cat /home/qhs/mfd_sde/deterministic_tmp.log | grep loss
    

SCP copy codes:
    
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/codes root@120.26.13.85:/home/qhs/mfd_sde/
    
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/codes root@120.26.13.85:/home/qhs/mfd_sde/
    
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r /home/qhs/Qhs_Files/Program/Python/MISC/LateralSDE/* root@172.16.22.11:/BIGDATA1/zju_qhs_1/LateralSDE/
    
    
    scp -i /home/qhs/Qhs_Files/Program/Python/GraphicalSolutionArterialRoad/docs/tianhe2/许骏/zju_qhs_1.id -P 5566 -r /home/qhs/Qhs_Files/Program/Python/MISC/LateralSDE zju_qhs_1@172.16.22.11:/BIGDATA1/zju_qhs_1/
    
    
    scp -i /home/qhs/Qhs_Files/Program/Python/GraphicalSolutionArterialRoad/docs/tianhe2/许骏/zju_qhs_1.id -P 5566 -r /home/qhs/Qhs_Files/Program/Python/MISC/LateralSDE/codes zju_qhs_1@172.16.22.11:/BIGDATA1/zju_qhs_1/LateralSDE/
    
    
SCP download netU
    
    #----------------single net reused.
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/netU_dict.torch; scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/R1R3/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/R1R3/netU_dict.torch; scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/deterministic/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/deterministic/netU_dict.torch
    
    
    #---------------series nets
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.picklenetUs.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/netUs_dict.torch; scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/R1R3/convergences_batches.picklenetUs.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/R1R3/netUs_dict.torch; scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/deterministic/convergences_batches.picklenetUs.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/deterministic/netUs_dict.torch
    
    

    #download
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.pickle /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/convergences_batches.pickle 
    
    #get netY and netU
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/netU_dict.torch
    #
    #
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.picklenetY.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/netY_dict.torch
    
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/timeusage.txt /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/timeusage.txt
    
    #===================================
    #R1R3
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/R1R3/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/R1R3/netU_dict.torch
"""


from model.required_modules import *
import DataProcess as DP
import model.ContrasiveRoadNetworkTripChainPretraining as model
#
#ROOTDIR = '/home/qhs/Program/morphGPT/morphGPT/'
#
#dataspath = ROOTDIR + 'datas/'
#checkpoint_path = ROOTDIR + 'codes/checkpoints/'
#
if __name__ == '__main__':
	# CLI configuration for the morphGPT trip-chain training run.
	parser = argparse.ArgumentParser(description='morphGPT')
	parser.add_argument('--ROOTDIR', type=str, default='/home/qhs/Program/morphGPT/morphGPT/', help='root of morphGPT')
	parser.add_argument('--stereo', action='store_true', help='if yes, use RoadBEV-stereo; otherwise, RoadBEV-mono')
	parser.add_argument('--cla_res', type=float, default=0.5, help='class resolution for elevation classification')
	# default raised 8 -> 64 to match the value that was previously hard-coded
	# at the DataLoader call site, so default behavior is unchanged.
	parser.add_argument('--batch_size', type=int, default=64, help='training batch size')
	parser.add_argument('--lr', type=float, default=8e-4, help='maximum learning rate')
	parser.add_argument('--epochs', type=int, default=50, help='number of epochs to train')
	parser.add_argument('--logdir', default='./checkpoints/', help='the directory to save logs and checkpoints')
	parser.add_argument('--loadckpt', default=None, help='load the weights from a specific checkpoint')
	parser.add_argument('--summary_freq', type=int, default=20, help='summary_freq')
	parser.add_argument('--seed', type=int, default=307, metavar='S', help='random seed')
	parser.add_argument('--osm_path', default='./checkpoints/', help='the path of the osm data')
	# BUG FIX: the original never called parse_args(), so ROOTDIR and epochs
	# below raised NameError and every CLI flag was silently ignored.
	args = parser.parse_args()
	#
	ROOTDIR = args.ROOTDIR
	dataspath = ROOTDIR + 'datas/'
	checkpoint_path = ROOTDIR + 'codes/checkpoints/'
	processed_data_path = ROOTDIR + 'datas/processed/'
	#
	# --seed was declared but never used; apply it so runs are reproducible.
	torch.manual_seed(args.seed)
	reload(DP);reload(model)
	#
	device = torch.device("cuda")
	# dim_embedding_map should equal mode_embedding_dim + 3; the '3' is for t, lon and lat.
	system_configs = {'dim_embedding_map':8, 'mode_embedding_dim':10, 'num_mode_classification':8, \
							}
	#
	# Dataset / loader; batch size now honors --batch_size (default matches the old 64).
	training_data = DP.morphGPTDataSet(root_dir = processed_data_path)
	train_dataloader = DataLoader(training_data, batch_size = args.batch_size, shuffle=True, collate_fn = DP.my_collate_fn)
	#
	# Model and loss, both moved to the training device.
	tripchaingenerative = model.GenerativeTripChain(mode_embedding_dim = system_configs['mode_embedding_dim'], \
									morph_embeding_dim  = system_configs['dim_embedding_map'], \
									mode_classsification_N = system_configs['num_mode_classification'], decoder_arg_nhead = 13)
	tripchaingenerative = tripchaingenerative.to(device)
	MyLoss = model.MyLoss().to(device)
	#
	# Honor --lr instead of hard-coding 8e-4 (same default, so behavior is unchanged).
	optimizer = optim.AdamW(tripchaingenerative.parameters(), lr=args.lr)
	#
	LOSS_es = []
	for epoch_idx in tqdm(range(args.epochs)):
		for i, sample in enumerate(train_dataloader):
			optimizer.zero_grad()
			#
			inputt, groundtruth = sample[0].to(device), sample[1].to(device)
			pred = tripchaingenerative(inputt, device = device)
			#
			LOSS = MyLoss(pred, groundtruth)
			# BUG FIX: .numpy() raises on a CUDA tensor; move to CPU and take
			# the Python scalar instead of the original .numpy()[()] dance.
			LOSS_es.append(LOSS.detach().cpu().item())
			#
			LOSS.backward()
			optimizer.step()
			#
			# Periodic checkpoint every 1000 steps (includes step 0 of each epoch).
			if i % 1000 == 0:
				torch.save(tripchaingenerative.state_dict(), checkpoint_path + 'epoch_{}_step_{}.ckpt'.format(epoch_idx+1, i))
		#
		# BUG FIX: pickle.dump requires an open binary file object, not a path
		# string; the original call raised TypeError at the end of every epoch.
		with open(dataspath + 'losses.pickle', 'wb') as f:
			pickle.dump(LOSS_es, f)
	
	
	
	
	
	
	
	
	
