# encoding: utf-8
import numpy as np
from scipy.ndimage import gaussian_filter1d


# Author     : forward4<liang.ma@siat.ac.cn>
# Datetime   :
# Project    : monkey_neuralDecoding
# File       : spikeProcessor.py
# description: Spike-train binning and Gaussian-smoothing utilities

class SpikeProcessor:
    def __init__(self, n_points, bin_size=50, fs=20000, **kwargs):
        """Bin and smooth spike-event trains.

        :param n_points: number of sampling points in each spike train
        :param bin_size: bin duration in milliseconds
        :param fs: sampling frequency in Hz
        :param kwargs: extra options; ``sd`` sets the Gaussian sigma
            (in units of bins) used by :meth:`smoothing` (default 0.25)
        """
        self.n_points = n_points
        self.bin_size = bin_size / 1000  # convert ms -> seconds
        self.fs = fs
        self.kwargs = kwargs

    def binning(self, spk_evt):
        """Convert per-channel spike sample indices into binned firing rates.

        :param spk_evt: list or tuple with one entry per channel; each entry
            is a sequence of sample indices at which spikes occurred
            (every index is expected to be < ``n_points``)
        :return: np.ndarray of shape ``(n_channels, n_bins)`` with firing
            rates in spikes/second; a ragged final bin is normalized by its
            actual (shorter) duration
        """
        n_ch = len(spk_evt)
        _spk_evt = np.zeros((n_ch, self.n_points), dtype="int")

        bin_win = int(self.fs * self.bin_size)  # samples per full bin

        # Mark a 1 at every sample index where the channel spiked.
        for i in range(n_ch):
            _spk_evt[i][spk_evt[i]] = 1

        n_bins = int(np.ceil(self.n_points / bin_win))
        binned_firing_rates = np.zeros((n_ch, n_bins), dtype="float")
        for bid in range(n_bins):
            s = bid * bin_win
            # Clamp the slice end to n_points, NOT n_points - 1: the slice
            # end is exclusive, so the old `e = n_points - 1` clamp dropped
            # the very last sample (a spike there was never counted) and,
            # because it fired on `>=`, it also shortened an exactly-fitting
            # final bin by one sample, inflating its rate.
            e = min(s + bin_win, self.n_points)

            _cur_bin_size = (e - s) / self.fs  # actual bin duration, seconds

            binned_firing_rates[:, bid] = np.sum(_spk_evt[:, s:e], axis=1) / _cur_bin_size

        return binned_firing_rates

    def smoothing(self, firing_rates):
        """Smooth each channel's firing-rate trace with a 1-D Gaussian kernel.

        :param firing_rates: np.ndarray of shape ``(n_channels, n_bins)``
        :return: np.ndarray of the same shape, smoothed along the bin axis
            with ``sigma = kwargs.get("sd", 0.25)`` (in bins)
        """
        n_ch = firing_rates.shape[0]
        sd = self.kwargs.get("sd", 0.25)  # sigma in units of bins
        smoothed_fr = np.zeros_like(firing_rates, dtype="float")

        for i in range(n_ch):
            smoothed_fr[i] = gaussian_filter1d(firing_rates[i], sigma=sd)

        return smoothed_fr
