# -*- coding: utf-8 -*-
"""
This code contains the reachability approach to the MFD.

The network traffic flow model is described using a stochastic CTM.

The stochasticity exists in:

    - the lane-changing flow rate of the multilane road.

yhbatch -o /BIGDATA1/zju_qhs_1/LateralSDE/tmp1.log /BIGDATA1/zju_qhs_1/LateralSDE/codes/run_FD_lanewidth.py


#============================================
容器云使用流程**********************************

一、登录容器云
1.第一种：使用ssh 登录： proxy.nscc-gz.cn，用户名：zju_qhs_1，密码：JQ7431wtymv
2.第二种通过网页登录：http://207669.proxy.nscc-gz.cn:8888/，打卡界面后输入星光的账户（qihongsheng@zju.edu.cn）和密码（JQ7431wtymv）

NOTE(review): plaintext account credentials are embedded above; they should be
rotated and moved out of version-controlled source (e.g. into an untracked
config file or environment variables).

二、加载环境：
1.加载源：module load proxy
2.加载anaconda3： module load  anaconda3
3.加载虚拟环境：source activate virtual_env
4.如何安装第三方包可通过：conda install XXX 或 pip3 install XXX

三、修改文件：
1.第一种：可以通过命令直接修改
2.第二种：先登录星光账户（https://starlight.nscc-gz.cn/#/storage/udt），然后找到存储，找到远程服务器，选则“GPUFS”即可对文件进行修改；



#============================================
    天河安装第三方库分为两部分：

    1）不需要调用深度学习等库：

    第一步--加载源：source /BIGDATA1/app/toolshs/setproxy.sh 12.10.133.131 3128；
    第二步--加载anaconda3环境： module load anaconda3；
    第三步--通过pip install XXX 安装第三方库


    2）需要调用深度学习等库：
    第一步--加载源：source /BIGDATA1/app/toolshs/setproxy.sh 12.10.133.131 3128；
    第二步--加载anaconda3环境： module load anaconda3；
    第三步--激活虚拟环境：source activate virtual_env；
    第四步--通过conda install XXX 或者pip3 install XXX 安装第三方库


yhbatch: 

    yhbatch -o /BIGDATA1/zju_qhs_1/LateralSDE/tmp.log /BIGDATA1/zju_qhs_1/LateralSDE/codes/run_FD.py
    
    /BIGDATA1/zju_qhs_1/LateralSDE/datas/twodimFD

nohup:

    nohup python /BIGDATA1/zju_qhs_1/LateralSDE/codes/run_pso.py > /BIGDATA1/zju_qhs_1/LateralSDE/tmp.log 2>&1 &
    
    tail /BIGDATA1/zju_qhs_1/LateralSDE/tmp.log

ssh:

    ---------------------------------tianhe
    ssh -i /home/qhs/Qhs_Files/Program/Python/GraphicalSolutionArterialRoad/docs/tianhe2/许骏/zju_qhs_1.id -p 5566 zju_qhs_1@172.16.22.11

scp


    scp -i /home/qhs/Qhs_Files/Program/Python/GraphicalSolutionArterialRoad/docs/tianhe2/许骏/zju_qhs_1.id -P 5566 -r /home/qhs/Program/morphGPT/morphGPT/* zju_qhs_1@172.16.22.11:/GPUFS/zju_qhs_1/qhs/morphGPT/

    ---------------------------------
    
####################################################################################
nohup: 
    
    nohup python /home/qhs/mfd_sde/codes/run_pomdp.py > /home/qhs/mfd_sde/tmp.log 2>&1 &
    
    nohup python /home/qhs/mfd_sde/codes/run_R1R3.py > /home/qhs/mfd_sde/R1R3_tmp.log 2>&1 &
    
    nohup python /home/qhs/mfd_sde/codes/run_deterministic.py > /home/qhs/mfd_sde/deterministic_tmp.log 2>&1 &
    
    #
    #nohup python /BIGDATA1/zju_qhs1/mfd_sde/codes/run_pomdp.py > /BIGDATA1/zju_qhs1/mfd_sde/tmp.log 2>&1 &
    
    
    cat /home/qhs/mfd_sde/tmp.log | grep loss
    
    cat /home/qhs/mfd_sde/R1R3_tmp.log | grep loss
    
    cat /home/qhs/mfd_sde/deterministic_tmp.log | grep loss
    

SCP copy codes:
    
    scp -i /home/qhs/Qhs_Files/Program/Python/GraphicalSolutionArterialRoad/docs/tianhe2/许骏/zju_qhs_1.id -P 5566 -r /home/qhs/Program/morphGPT/morphGPT/inbooks zju_qhs_1@172.16.22.11:/BIGDATA1/zju_qhs_1/qqq/morphGPT
    
    
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/codes root@120.26.13.85:/home/qhs/mfd_sde/
    
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/codes root@120.26.13.85:/home/qhs/mfd_sde/
    
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r /home/qhs/Qhs_Files/Program/Python/MISC/LateralSDE/* root@172.16.22.11:/BIGDATA1/zju_qhs_1/LateralSDE/
    
    
    
    scp -i /home/qhs/Qhs_Files/Program/Python/GraphicalSolutionArterialRoad/docs/tianhe2/许骏/zju_qhs_1.id -P 5566 -r /home/qhs/Qhs_Files/Program/Python/MISC/LateralSDE zju_qhs_1@172.16.22.11:/BIGDATA1/zju_qhs_1/
    
    
    scp -i /home/qhs/Qhs_Files/Program/Python/GraphicalSolutionArterialRoad/docs/tianhe2/许骏/zju_qhs_1.id -P 5566 -r /home/qhs/Qhs_Files/Program/Python/MISC/LateralSDE/codes zju_qhs_1@172.16.22.11:/BIGDATA1/zju_qhs_1/LateralSDE/
    
    
SCP download netU
    
    #----------------single net reused.
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/netU_dict.torch; scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/R1R3/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/R1R3/netU_dict.torch; scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/deterministic/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/deterministic/netU_dict.torch
    
    
    #---------------series nets
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.picklenetUs.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/netUs_dict.torch; scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/R1R3/convergences_batches.picklenetUs.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/R1R3/netUs_dict.torch; scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/deterministic/convergences_batches.picklenetUs.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/deterministic/netUs_dict.torch
    
    

    #download
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.pickle /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/convergences_batches.pickle 
    
    #get netY and netU
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/netU_dict.torch
    #
    #
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/convergences_batches.picklenetY.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/netY_dict.torch
    
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/timeusage.txt /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/timeusage.txt
    
    #===================================
    #R1R3
    scp -i /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/docs/root.pem -r root@120.26.13.85:/home/qhs/mfd_sde/datas/R1R3/convergences_batches.picklenetU.torch /home/qhs/Qhs_Files/Program/Python/MISC/mfd_sde/datas/R1R3/netU_dict.torch
"""
#
import os,sys
import pandas as pd
from tqdm import tqdm
import copy
import pickle

#
# Project root; every data/code path below is derived from it.
ROOT = '/media/znr/Z/'
os.chdir(ROOT + 'morphGPT/codes/')
from required_modules import *
import DataProcess as DP

#
# Source trajectory files and destination for the processed tensors.
dataspath      = ROOT + 'morphGPT/datas/OpenPFLOW/DataSet/'
saveddataspath =  ROOT + 'morphGPT/datas/processed/train/'
# Create the output folders. exist_ok=True makes this a no-op when the folder
# is already there, replacing the racy os.path.exists() + os.makedirs() pair.
os.makedirs(saveddataspath + 'input', exist_ok=True)
os.makedirs(saveddataspath + 'groundtruth', exist_ok=True)

# For each trajectory file, split the rows per user, embed each user's trip,
# and save (input, groundtruth) tensor pairs to disk.
for i in range(1, 10):
    # Files are named trajectory01.tsv ... trajectory09.tsv (i stays < 10,
    # so a single zero-pad digit is sufficient).
    filename = dataspath + 'trajectory0' + str(i) + '.tsv'
    # No header; column 0 is the user id, column 1 is the sort key
    # (presumably a timestamp -- TODO confirm against the data spec).
    tripdata1 = pd.read_csv(filename, sep='\t', header=None, skiprows=[0])
    # Unique user ids in a stable (sorted) processing order.
    uids = sorted(set(tripdata1[0]))
    for uid in tqdm(uids):
        # BUG FIX: the original filtered on uids[0] every iteration, so every
        # saved file contained the FIRST user's trajectory instead of uid's.
        data = tripdata1[tripdata1[0] == uid].sort_values(by=1, ascending=True)
        # Embed the trip (uid column dropped) and build its training label.
        trip_data_embeding, groundtruth = DP.TripDataProcess.Trajec2EmbeddingWithLabel_delta_t_delta_lon_lat(
            data.values[:, 1:], num_embeddings_mode=8, mode_embedding_dim=10)
        # Persist the pair keyed by file index and user id.
        torch.save(trip_data_embeding, saveddataspath + 'input/' + str(i) + '_' + str(uid) + '.pt')
        torch.save(groundtruth, saveddataspath + 'groundtruth/' + str(i) + '_' + str(uid) + '.pt')