import os
import numpy as np
import argparse
import configparser
import torch
import torch.nn as nn
import torch.nn.functional as F
import zipfile



def load_metr_la_data():
    """
    Load the raw adjacency matrix and feature series from disk and z-score
    normalize the features.

    Returns:
        A:     adjacency matrix loaded from ./data/B.npy (float32 on disk).
        X:     float32 array of shape (time, features, nodes) — presumably
               (4380, 4, 303) after the slice below; confirm against the data.
        means: per-feature means used for normalization.
        stds:  per-feature standard deviations used for normalization.
    """
    A = np.load("./data/B.npy")
    raw = np.load("./data/all_data_0224.npz", allow_pickle=True)

    # (time, nodes, features) -> (time, features, nodes), then keep the
    # first 4380 timesteps only.
    X = raw['arr_0'].transpose((0, 2, 1)).astype(np.float32)[:4380, :, :]

    # Z-score per feature channel, statistics taken over time and nodes.
    means = np.mean(X, axis=(0, 2))
    X -= means.reshape(1, -1, 1)
    stds = np.std(X, axis=(0, 2))  # computed after centering (shift-invariant)
    X /= stds.reshape(1, -1, 1)

    return A, X, means, stds


def get_normalized_adj(A):
    """
    Return the symmetrically degree-normalized adjacency matrix
    D^{-1/2} (A + I) D^{-1/2}.
    """
    # Add self-loops.
    A_hat = A + np.diag(np.ones(A.shape[0], dtype=np.float32))

    # Row-degree vector; clamp tiny degrees so the inverse sqrt stays finite.
    # NOTE(review): 10e-5 is 1e-4 — possibly a typo for 1e-5, kept as-is.
    degree = np.array(A_hat.sum(axis=1)).reshape(-1)
    degree[degree <= 10e-5] = 10e-5

    inv_sqrt_deg = np.reciprocal(np.sqrt(degree))
    return inv_sqrt_deg.reshape(-1, 1) * A_hat * inv_sqrt_deg.reshape(1, -1)


def search_data(length, num_of_depend, label_start_idx,
                num_timesteps_output, units, points_per_hour):
    """
    Compute the (start, end) index windows of the historical slices that
    precede a prediction target.

    Each of the `num_of_depend` windows starts `points_per_hour * units * k`
    steps before `label_start_idx` (k = 1..num_of_depend) and spans
    `num_timesteps_output` steps.

    Returns:
        List of (start_idx, end_idx) tuples, or None when the target window
        runs past `length` or any history window would start before index 0.
    """
    # Target must fit inside the series.
    if label_start_idx + num_timesteps_output > length:
        return None

    step = points_per_hour * units
    windows = []
    for k in range(1, num_of_depend + 1):
        begin = label_start_idx - step * k
        if begin < 0:
            # Not enough history for this dependency — reject the whole sample.
            return None
        windows.append((begin, begin + num_timesteps_output))

    return windows


def get_sample_indices(Data, num_of_weeks, label_start_idx, num_timesteps_output):
    """
    Build one weekly-pattern sample and its prediction target.

    Args:
        Data: array of shape (time, features, nodes) — confirm against loader.
        num_of_weeks: number of weekly history windows to concatenate;
            0 disables the weekly sample.
        label_start_idx: first timestep of the prediction target.
        num_timesteps_output: length of the target window.

    Returns:
        (week_sample, target) — week_sample is None when num_of_weeks == 0;
        (None, None) when the required windows cannot be built.
    """
    if label_start_idx + num_timesteps_output > Data.shape[0]:
        return None, None

    week_sample = None
    if num_of_weeks > 0:
        # 7 * 24 hours back per week, at points_per_hour samples per hour.
        week_indices = search_data(Data.shape[0], num_of_weeks,
                                   label_start_idx, num_timesteps_output,
                                   7 * 24, points_per_hour=2)
        if not week_indices:
            # BUG FIX: the original returned a 4-tuple (None, None, None, None)
            # here, inconsistent with the 2-tuple on every other path.
            return None, None

        week_sample = np.concatenate([Data[i:j, :, :] for i, j in week_indices],
                                     axis=0)

    target = Data[label_start_idx: label_start_idx + num_timesteps_output, :, :]
    return week_sample, target


def generate_dataset(Data, num_of_weeks, num_timesteps_output):
    """
    Slide over the whole series and collect every valid weekly sample and
    its target as stacked torch tensors.

    Args:
        Data: array of shape (time, features, nodes) — confirm against loader.
        num_of_weeks: weekly history windows per sample; 0 yields targets only.
        num_timesteps_output: target window length.

    Returns:
        (samples, targets) torch tensors; samples are (N, T, F) per entry and
        targets keep only feature 0, giving (N, T) per entry.
    """
    sample_list = []
    target_list = []

    for start in range(Data.shape[0]):
        pair = get_sample_indices(Data, num_of_weeks, start, num_timesteps_output)

        # Skip positions where no valid window could be built.
        if pair[0] is None and pair[1] is None:
            continue
        week_sample, week_target = pair

        if num_of_weeks > 0:
            # (T, F, N) -> (N, T, F): node-major layout for the model.
            sample_list.append(week_sample.transpose((2, 0, 1)))

        # (T, F, N) -> (N, T, F), then keep feature channel 0 only -> (N, T).
        target_list.append(week_target.transpose((2, 0, 1))[:, :, 0])

    return (torch.from_numpy(np.array(sample_list)),
            torch.from_numpy(np.array(target_list)))
