from datetime import datetime, timedelta
from utils.config import input_path, label_path, weight_root
import os
import numpy as np
import json
from model.SmaAt_UNet import SmaAt_UNet
import torch
from utils.s2d import space_to_depth2
from utils.d2s import depth_to_space
from utils.tools import ts_station
import time

# Station index list: JSON file holding (row, col) pairs of rain-gauge
# locations on the cropped grid. Transposed so `idxs` is a (rows, cols)
# tuple usable directly for numpy fancy indexing in ts_station.
# NOTE(review): hard-coded absolute path — verify it exists on the target host.
with open('/home/gym/projects/my_test/data/idxs.json') as f:
    idxs = tuple(np.array(json.load(f)).T)

# Grid geometry: 560 x 800 cells at 0.01-degree resolution.
# (xdim/ydim/reso are not referenced below — presumably kept for reference.)
xdim, ydim, reso = 560, 800, 0.01

# Build the nowcasting model and load the trained checkpoint for inference.
# 4 input / 4 output channels because space_to_depth2 with block size 2
# packs the single-channel 560x800 field into 4 channels of 280x400.
model = SmaAt_UNet(in_channels=4, out_channels=4)
# model = UNet()
device = torch.device("cuda:0")  # was `device = gpu1 = ...`; unused alias removed
weight_file = os.path.join(weight_root, "20220304.pth")  # plain string; f-string had no placeholder
print(weight_file)
checkpoints = torch.load(weight_file, map_location=device)
model.load_state_dict(checkpoints['state_dict'])
model.to(device)
model.eval()  # inference mode: freeze dropout / batch-norm statistics

# Wall-clock timer for the whole evaluation run.
start = time.time()

# Forecast lead times in minutes: 12, 24, ..., 360 (30 evenly spaced values).
# fmins = np.linspace(24, 360, 15).astype(int).tolist()
fmins = np.linspace(12, 360, 30).tolist()
# Precipitation thresholds (mm) at which the threat score (TS) is evaluated.
ths = [0.1, 1, 5, 10, 15]

# Score accumulators: one row per lead time, one column per threshold.
score_shape = (len(fmins), len(ths))
alltsx = np.zeros(score_shape)   # TS of the raw forecast
alltsy = np.zeros(score_shape)   # TS of the model-blended forecast
allmaex = np.zeros(len(fmins))   # MAE accumulators (currently unused below)
allmaey = np.zeros(len(fmins))

# First evaluation day, Beijing time (UTC+8), as 'YYYYmmddHHMM'.
first_bj_time = '202110010000'

# Forecast issue offsets within a day: every 60 minutes from +60 to +1440
# (24 hourly steps). Loop-invariant, so computed once instead of per lead time.
time_steps = np.linspace(60, 1440, 1440 // 60)

for i, fm in enumerate(fmins):
    apcp_score, sub_score = [], []
    for day in range(31):  # the 31 days of October 2021
        bj_time = (datetime.strptime(first_bj_time, '%Y%m%d%H%M') + timedelta(days=day)).strftime('%Y%m%d%H%M')
        # Beijing time -> UTC for the forecast file naming convention.
        utc_time = (datetime.strptime(bj_time, '%Y%m%d%H%M') - timedelta(hours=8)).strftime('%Y%m%d%H%M')
        for step in time_steps:
            # Issue time (UTC), forecast valid time (UTC), label time (UTC+8).
            start_utc_time = (datetime.strptime(utc_time[:12], '%Y%m%d%H%M') + timedelta(minutes=step)).strftime(
                '%Y%m%d%H%M')
            utc_ftime = (datetime.strptime(start_utc_time[:12], '%Y%m%d%H%M') + timedelta(minutes=fm)).strftime(
                '%Y%m%d%H%M')
            label_time = (datetime.strptime(utc_ftime[:12], '%Y%m%d%H%M') + timedelta(hours=8)).strftime('%Y%m%d%H%M')
            input_file = f"{input_path}/GXR1KM_APCP_{start_utc_time}00/{utc_ftime}.000"
            label_file = f"{label_path}/{label_time[:8]}/{label_time}.000"
            # Fixed: boolean `and` instead of integer `*` (relied on `not`
            # binding looser than `*`; same truth table, but `and` is explicit).
            if not (os.path.isfile(input_file) and os.path.isfile(label_file)):
                continue

            # Crop to the 560x800 evaluation window (loadtxt already returns
            # a fresh array, so the previous .copy() was redundant).
            apcp_data = np.loadtxt(input_file)[120:-200, 200:-160]
            # Skip mostly-dry fields: fewer than 10% wet pixels.
            if np.sum(apcp_data > 0) / apcp_data.size < 0.1:
                continue

            # Clamp negative (missing/fill) values to zero. The original did
            # this through an expand_dims view, mutating apcp_data implicitly;
            # here the in-place clamp on apcp_data is explicit.
            apcp_data[apcp_data < 0] = 0
            # Add batch and channel dims -> (1, 1, H, W) for the network.
            x_batch = torch.from_numpy(apcp_data[np.newaxis, np.newaxis, ...].copy())
            x_batch = space_to_depth2(x_batch, 2)

            with torch.no_grad():
                out = model(x_batch.to(device=device, dtype=torch.float32))
                out = depth_to_space(out, 2).cpu().numpy().squeeze()
                # out[out < 0] = 0

            # Blended product: pixel-wise max of raw forecast and model output.
            sub_data = np.maximum(apcp_data, out)

            # Station observations; NaNs treated as zero rainfall.
            station_data = np.nan_to_num(np.loadtxt(label_file))

            tsx = ts_station(apcp_data, station_data, idxs).round(2)
            tss = ts_station(sub_data, station_data, idxs).round(2)

            apcp_score.append(tsx)
            sub_score.append(tss)

    if not apcp_score:
        # No valid samples at this lead time: record NaN explicitly instead of
        # letting np.nanmean([]) warn and propagate NaN implicitly.
        alltsx[i] = np.nan
        alltsy[i] = np.nan
        continue

    _tsx = np.nanmean(apcp_score, axis=0).round(4)
    _tsy = np.nanmean(sub_score, axis=0).round(4)

    alltsx[i] = _tsx
    alltsy[i] = _tsy

    print(f"{first_bj_time[:8]}-{fm}, "
          f"{_tsx.tolist()}, \n"
          f" , {_tsy.tolist()} \n")
