import numpy as np
import pickle, os
from scipy import stats
from scipy.io import loadmat
from datetime import datetime
# from wiener_filter import format_data_from_trials, train_wiener_filter, test_wiener_filter

from sklearn.metrics import r2_score
from matplotlib import pyplot as plt
from torch.utils.data import DataLoader
import torch
import torch.nn as nn
import torch.optim as optim
from sklearn.metrics import r2_score
from torch.utils.data import DataLoader, TensorDataset, Dataset, random_split
from sklearn.model_selection import KFold

class MyDataset(Dataset):
    """Minimal torch Dataset wrapping two index-aligned sequences.

    `data` and `labels` must support `len()` and integer indexing and are
    assumed to be the same length (length is taken from `data` only).
    """

    def __init__(self, data, labels):
        self.data = data
        self.labels = labels

    def __len__(self):
        # Size is defined solely by the data sequence.
        return len(self.data)

    def __getitem__(self, idx):
        sample = self.data[idx]
        target = self.labels[idx]
        return sample, target

def label_clip(labels, n=3):
    """Clip the x and y label columns to within ``n`` standard deviations.

    Parameters
    ----------
    labels : array-like, shape (n_samples, >=2)
        Columns 0 and 1 are treated as x and y; any extra columns are ignored.
    n : float, default 3
        Half-width of the allowed band, in standard deviations of each column.

    Returns
    -------
    ndarray, shape (n_samples, 2)
        Copy of the x/y columns with outliers clipped to
        [mean - n*std, mean + n*std] per column (population std,
        matching ``np.std``'s default ddof=0).
    """
    xy = np.asarray(labels)[:, :2]
    # Compute per-column statistics once instead of re-evaluating
    # mean/std for every bound as the original loop-free-but-redundant
    # formulation did.
    center = xy.mean(axis=0)
    spread = xy.std(axis=0)
    return np.clip(xy, center - n * spread, center + n * spread)

def label_to_polar(labels):
    """Convert Cartesian (x, y) label pairs to polar (r, theta).

    Parameters
    ----------
    labels : ndarray, shape (n_samples, >=2)
        Column 0 is x, column 1 is y.

    Returns
    -------
    ndarray, shape (n_samples, 2)
        Column 0 is the radius r, column 1 the angle theta in radians
        in (-pi, pi] (``np.arctan2`` convention).
    """
    x = labels[:, 0]
    y = labels[:, 1]
    # np.hypot avoids overflow/underflow of the naive sqrt(x**2 + y**2)
    # for very large or very small coordinates.
    r = np.hypot(x, y)
    theta = np.arctan2(y, x)
    return np.column_stack((r, theta))

def smooth(firing_rates, kernel_type='gaussian', kernel_SD=0.07, bin_size=0.01):
    """Smooth each row of ``firing_rates`` with a (half-)gaussian kernel.

    Parameters
    ----------
    firing_rates : ndarray
        Binned rates; each row is smoothed independently along its length.
        (Rows are presumably channels/neurons and columns time bins —
        verify against callers.)
    kernel_type : str
        'gaussian' (default) or 'half_gaussian' (causal: the negative-lag
        half of the kernel is zeroed). Any other value falls through to
        the full gaussian.
    kernel_SD : float
        Kernel standard deviation, in the same time units as ``bin_size``.
    bin_size : float
        Width of one bin.

    Returns
    -------
    ndarray
        Array of the smoothed rows, same shape as the input.
    """
    # Kernel support: +/- 3 SD, expressed in bins.
    half_len = 3 * int(kernel_SD / bin_size)
    offsets = np.arange(-half_len * bin_size, (half_len + 1) * bin_size, bin_size)
    kernel = stats.norm(0, kernel_SD).pdf(offsets)
    if kernel_type == 'half_gaussian':
        # Zero the negative-lag half BEFORE computing the normalizer,
        # so edge correction matches the truncated kernel.
        kernel[:half_len] = 0

    rows = firing_rates.tolist()
    n_sample = np.size(rows[0])
    # Edge-corrected normalizer: convolving with an all-ones signal gives,
    # per position, the total kernel mass actually overlapping the data.
    normalizer = np.convolve(kernel, np.ones(n_sample))[half_len:n_sample + half_len]

    smoothed = [
        np.convolve(kernel, row)[half_len:n_sample + half_len] / normalizer
        for row in rows
    ]
    return np.asarray(smoothed)


