from utils.config import label_path, input_path, txt_path, radar_path, log_path
import os
from datetime import datetime, timedelta
import numpy as np
from utils.logger import Logger
from multiprocessing import Pool
import random
from utils.tools import get_rad


# Module-level rotating logger for the dataset-building pipeline.
# One log file per calendar month; rotated daily ('D'), keeping 30 backups.
datasets_logger = Logger(filename=f'{log_path}/dataset_{datetime.now().year}-{datetime.now().month}.log', level='info',
                         when='D', back_count=30).logger


def make_all_data(input_file):
    """Index one APCP forecast file if it has matching radar and label data.

    Expects ``input_file`` shaped like ``<...>/<runtime-dir>/<validtime-file>``
    where the directory name ends with the model run time and the file name
    starts with the valid time (both '%Y%m%d%H%M').  When the forecast lead
    time is under one hour, the corresponding radar and station-label files
    exist, and the rain masks overlap enough, the triplet
    ``"<apcp> <radar> <label>"`` is appended to the module-global ``allTxt``.

    Returns None in all cases; side effect is the optional ``allTxt`` write.
    """
    apcp_dir, apcp_name = os.path.split(input_file)

    # Lead time = valid time (file name) - run time (directory name).
    lead = datetime.strptime(apcp_name[:12], '%Y%m%d%H%M') - datetime.strptime(apcp_dir[-14:-2], '%Y%m%d%H%M')
    # BUGFIX: the original tested ``lead.seconds < 3600``, which ignores the
    # day component of the timedelta (a 1-day lead reports seconds == 0 and
    # slipped through) and is wrong for negative deltas.  Accept only
    # non-negative leads shorter than one hour.  NOTE(review): the original
    # comment said "within 2 hours" but the check is 1 hour — kept at 1 hour.
    if not (0 <= lead.total_seconds() < 3600):
        return

    # Labels are named in local time (valid time + 8 h); radar files use the
    # APCP timestamp directly.
    labelTime = (datetime.strptime(apcp_name[:12], '%Y%m%d%H%M') + timedelta(hours=8)).strftime('%Y%m%d%H%M')
    label_file = f"{label_path}/{labelTime[:8]}/{labelTime}.000"
    radar_dir = datetime.strptime(apcp_name[:8], '%Y%m%d').strftime('%Y-%m-%d')
    radar_file = f"{radar_path}/{radar_dir}/MCR/Z_OTHE_RADAMCR_{apcp_name[:12]}00.bin.bz2"
    if not (os.path.isfile(radar_file) and os.path.isfile(label_file)):
        return

    # Crop the APCP grid to the common region of interest.
    # (Dropped the pointless ``.copy()`` — np.loadtxt already returns a
    # fresh array and the result is only read.)
    apcp_data = np.loadtxt(input_file)[120:-200, 200:-160]

    try:
        raddata, lat0, lon0 = get_rad(radar_file)
    except Exception:  # narrowed from bare ``except``; best-effort skip of bad files
        print("error radar file:", radar_file)
        return
    raddata = raddata[140:-200, 220:-180] / 5
    station_data = np.nan_to_num(np.loadtxt(label_file))  # [:160, :160]

    # Station observations take priority; fall back to the radar estimate.
    target_data = np.where(station_data > 0, station_data, raddata)
    aa = apcp_data > 0
    bb = target_data > 0

    # NOTE(review): not a true IoU — the denominator counts the intersection
    # twice (Dice-like).  Kept as-is because the 0.3 threshold below was
    # presumably tuned against this exact formula; confirm before changing.
    iou = (aa * bb).sum() / (aa.sum() + bb.sum())

    if iou > 0.3:
        # ``allTxt`` is a module global opened in __main__ and inherited by
        # fork()ed pool workers — TODO confirm concurrent appends are safe here.
        allTxt.write(f"{input_file} {radar_file} {label_file}\n")
        print(f"find {input_file}")


def data_split():
    """Build the tp_all.txt index in parallel, then split it into
    train / val / test list files (tp_train.txt, tp_val.txt, tp_test.txt).

    Split rules (on the APCP file's YYYYMMDD prefix):
      * only samples dated >= 20210625 are indexed at all;
      * only samples dated >= 20210701 are written to any split;
      * samples dated after 20211001 go to the test set;
      * a fixed 15% random sample (seed 222) goes to validation
        (validation wins over test when both apply, as in the original);
      * everything else goes to training.

    Side effects: writes the four txt files under ``txt_path`` and closes
    the module-global ``allTxt`` handle.
    """
    infiles = []
    for root, dirs, files in os.walk(input_path, topdown=False):
        for file in files:
            # Skip samples before the dataset start date.
            if int(file[:8]) < 20210625:  # or int(file[:8]) > 20210705:
                continue
            infiles.append(os.path.join(root, file))

    # NOTE(review): workers inherit the module-global ``allTxt`` handle via
    # fork(); this will not work under the "spawn" start method — confirm
    # the target platform is Linux.
    pool = Pool(processes=8)
    pool.map(make_all_data, infiles)
    pool.close()
    pool.join()

    allTxt.close()

    # BUGFIX: the read handle was previously never closed.
    with open(os.path.join(txt_path, "tp_all.txt"), "r") as all_fh:
        allfiles = all_fh.readlines()

    # Everything after 2021-10-01 is held out for testing.  Use a set so the
    # per-sample membership test below is O(1) instead of O(n).
    test_set = set()
    for data_path in allfiles:
        acpc_, radar_, label_ = data_path.strip().split(' ')
        if int(os.path.basename(acpc_)[:8]) > 20211001:
            test_set.add(data_path)

    # Fixed seed so the validation split is reproducible across runs.
    random.seed(222)
    val_set = set(random.sample(allfiles, int(len(allfiles) * 0.15)))
    allfiles.sort()

    train_cnt = 0
    val_cnt = 0
    test_cnt = 0
    # BUGFIX: testTxt was opened but never closed; ``with`` closes all three
    # even if the loop raises.
    with open(os.path.join(txt_path, "tp_train.txt"), "w") as trainTxt, \
            open(os.path.join(txt_path, "tp_val.txt"), "w") as valTxt, \
            open(os.path.join(txt_path, "tp_test.txt"), "w") as testTxt:
        for data_path in allfiles:
            acpc_, radar_, label_ = data_path.strip().split(' ')
            if int(os.path.basename(acpc_)[:8]) < 20210701:
                continue
            print("add", acpc_)

            if data_path in val_set:
                val_cnt += 1
                valTxt.write(data_path)

            elif data_path in test_set:
                test_cnt += 1
                testTxt.write(data_path)

            else:
                train_cnt += 1
                trainTxt.write(data_path)

    msg = f"VALID DATA COUNT：{len(allfiles)}"
    datasets_logger.info(msg)

    if train_cnt * val_cnt == 0:
        datasets_logger.error("No Train/Val Data Count!")


if __name__ == '__main__':
    # Open the global index file before forking: data_split()'s pool workers
    # inherit this handle and append matched triplets to it.
    allTxt = open(os.path.join(txt_path, "tp_all.txt"), "w")

    data_split()

    # Redundant with the close() inside data_split(), but harmless.
    allTxt.close()
