import fast_network_analysis as fna
import single_cell_analysis as ssa

import numpy as np
import pandas as pd

from scipy import *
import scipy.io as io

from brian import *

#import pylab as pyl
import matplotlib.pyplot as plt
import shelve
import pickle
import time
import shutil
from datetime import datetime
import sys

# ---------------- load the file
def loadSpikes(filename,folder='def'):
    """Load the spike dictionary saved by a simulation run.

    Parameters
    ----------
    filename : str
        Base name of the saved data set (without the '_sp-1.db' suffix).
    folder : str
        Directory containing the file; the sentinel 'def' selects
        '../datasave/'.

    Returns
    -------
    The object stored under the 'spikes' key of
    '<folder><filename>_sp-1.db'.
    """
    if folder=='def':
        folder='../datasave/'

    pathfile=folder+filename

    # The original reused one variable for the shelf and its payload and
    # never closed the handle; read the payload, then close the shelf.
    db=shelve.open(pathfile+'_sp-1.db',protocol=-2)
    try:
        spikes=db['spikes']
    finally:
        db.close()

    return spikes

def loadRates(filename,folder='def'):
    """Load the population-rate records saved by a simulation run.

    Parameters
    ----------
    filename : str
        Base name of the saved data set (without the '_rate-1.db' suffix).
    folder : str
        Directory containing the file; 'def' selects '../datasave/'.

    Returns
    -------
    dict with the shelf's contents (e.g. 'Rate_exc', 'Rate_inh',
    'Rate_clu', 'simpar' -- whatever the run stored). The original
    returned the open shelf itself and leaked the handle; materializing
    to a plain dict keeps the mapping interface while letting the file
    be closed.
    """
    if folder=='def':
        folder='../datasave/'

    db=shelve.open(folder+filename+'_rate-1.db',protocol=-2)
    try:
        rates=dict(db)
    finally:
        db.close()

    return rates

def loadMonitors(filename,folder):
    """Load voltage and conductance monitor traces for all three populations.

    Reads nine shelve files ('<base>_{v,ge,gi}_{e,c,i}-1.db') and groups
    them into one dict per population.

    Parameters
    ----------
    filename : str
        Base name of the saved data set.
    folder : str
        Directory containing the monitor files.

    Returns
    -------
    (out_e, out_c, out_i, times) where each out_* dict has keys
    'voltage', 'ge', 'gi' (2-D arrays, neurons x samples -- assumed from
    the axis=1 usage below) and times is the sample-time vector built
    from the hard-coded 0.1 ms monitor step.

    Notes
    -----
    The original opened nine shelves and never closed them (setting the
    handle to None does not close a shelf) and shadowed the module-level
    ``time`` import with a local array.
    """
    pathfile=folder+filename
    timestep=0.0001  # monitor sampling interval (s)

    def _load(suffix,key):
        # Read one monitor array and close its shelf immediately.
        db=shelve.open(pathfile+'_'+suffix+'-1.db',protocol=-2)
        try:
            return db[key]
        finally:
            db.close()

    out_e={}
    out_c={}
    out_i={}
    for pop,out in (('e',out_e),('c',out_c),('i',out_i)):
        out['voltage']=_load('v_'+pop,'voltage')
        out['ge']=_load('ge_'+pop,'ge')
        out['gi']=_load('gi_'+pop,'gi')

    # Sample times: bin k holds time (k+1)*timestep, matching the
    # original linspace construction.
    nbins=np.size(out_e['voltage'],axis=1)
    times=np.linspace(timestep,timestep*nbins,nbins)

    return out_e, out_c, out_i, times

def loadSpikes_AER(filename,folder,nneurons=5000):
    """Assemble a per-neuron spike-time dictionary from per-run AER files.

    Reads '<folder><filename>-param.db' for the number of runs,
    concatenates the (address, timestamp) streams loaded by ``load_aer``
    from each run's '.aedat' file, and bins the timestamps by neuron
    address.

    Parameters
    ----------
    filename : str
        Base name of the data set.
    folder : str
        Directory holding the '-param.db' and '-<k>.aedat' files.
    nneurons : int
        Number of neuron addresses to collect (was hard-coded to 5000).

    Returns
    -------
    dict mapping neuron index -> array of that neuron's spike times.
    """
    pathfile=folder+filename

    # Read the run count and close the shelf promptly (was leaked).
    params=shelve.open(pathfile+'-param.db',protocol=-2)
    try:
        nruns=params['simpar']['Nruns']
    finally:
        params.close()

    neulist=np.array([])
    firtimelist=np.array([])

    # NOTE(review): the original iterated arange(1, nruns+1-1), i.e.
    # runs 1..nruns-1, so the last run is skipped -- confirm whether
    # that is intentional before changing it.
    for kk in np.arange(1,nruns):
        print(kk)
        neu_t, firtime_t = load_aer(pathfile+'-'+str(kk)+'.aedat')
        neulist=np.concatenate([neulist,neu_t])
        firtimelist=np.concatenate([firtimelist,firtime_t])

    sp_dict={}

    start_time = time.time()
    for ii in range(nneurons):
        sp_dict[ii]=firtimelist[neulist==ii]

        if ii%100 == 0:
            print("100 neurons dict time: %s seconds" % (time.time() - start_time))

    # Removed a leftover 'import pdb; pdb.set_trace()' debug breakpoint
    # and the dead commented-out SpikeGeneratorGroup experiment.
    return sp_dict

def saveMatformat_r(filename,folder,vtrace):
    """Export a regular-cell voltage trace to MATLAB format.

    Writes '<folder><filename>_vr_mat.mat' with the trace under the
    'vtrace' key; 1-D arrays are stored as column vectors.
    """
    import scipy.io as io

    target=folder+filename+'_vr_mat.mat'
    io.savemat(target, mdict={'vtrace': vtrace}, oned_as='column')
 
def saveMatformat_c(filename,folder,vtrace):
    """Export a cluster-cell voltage trace to MATLAB format.

    Writes '<folder><filename>_vc_mat.mat' with the trace under the
    'vtrace' key; 1-D arrays are stored as column vectors.
    """
    import scipy.io as io

    target=folder+filename+'_vc_mat.mat'
    io.savemat(target, mdict={'vtrace': vtrace}, oned_as='column')

def large_event_network(vtrace,spdict,eventsize,celltype,verbose='none'):
    """Per-neuron large-event rate curves for one population.

    For every voltage trace, detects "big events" above ``eventsize``
    (via ssa.search_big_events) and converts their amplitudes into a rate
    density on a common amplitude grid (big_event_preparebin), then
    averages across neurons.

    Parameters
    ----------
    vtrace : 2-D array, one membrane-voltage trace per row; the 0.1 ms
        sampling step is hard-coded below.
    spdict : dict with at least 'netpar' (for the 'Ne' population size)
        and 'spikes' (neuron index -> spike times).
    eventsize : detection threshold (V); also the lower edge of the
        amplitude grid, which runs to 15 mV in 0.5 mV steps.
    celltype : 'regular' (traces map to neurons 0..n-1) or 'cluster'
        (traces are numbered after the Ne regular cells).
    verbose : 'plot' produces summary figures; anything else is silent.

    Returns
    -------
    (eventsize_vec, bigevent_mean, bigevent_std): amplitude grid, the
    across-neuron mean rate density, and its standard error.
    """
    #if verbose = 'none', no output is given
    #if verbose = 'plot', plots appear

    # Offset between a row of vtrace and the neuron's global index in
    # spdict['spikes']: cluster cells follow the Ne regular cells.
    if celltype=='regular':
        spcounter_bias=0
    elif celltype=='cluster':
        spcounter_bias=spdict['netpar']['Ne']
        
    dt=0.0001  # monitor sampling step (s)

    ntraces=size(vtrace,axis=0)
    spcounter=arange(0,ntraces)+spcounter_bias

    nbins=size(vtrace,axis=1)
    times=linspace(dt,dt*nbins,nbins)

    # Amplitude grid: detection threshold up to 15 mV, 0.5 mV bins.
    eventsize_vec=arange(eventsize,0.015,0.0005)

    bigevent_local=[]
    
    for ii in spcounter:

        # 0.005: last argument to the detector -- NOTE(review): its
        # meaning (window/duration?) must be confirmed against
        # ssa.search_big_events.
        bigevents=ssa.search_big_events(vtrace[ii-spcounter_bias],times,spdict['spikes'][ii],eventsize,0.005)

        # Rate density (Hz/mV) of this neuron's events on the grid.
        bigevent_local_t = big_event_preparebin(eventsize_vec,bigevents['ampli'],bigevents['spiketrigger'],times)
        
        bigevent_local.append(bigevent_local_t)
    
    bigevent_local=array(bigevent_local)
    bigevent_mean=bigevent_local.mean(axis=0)
    # Standard error of the mean across neurons.
    bigevent_std=bigevent_local.std(axis=0) / np.sqrt(size(bigevent_local,axis=0))

    if verbose=='plot':
        # Uses the loop variables (ii, bigevents) left over from the LAST
        # iteration; the -25 offsets select an earlier neuron to display.
        plot_trace_spikes(times,vtrace[ii-spcounter_bias-25],spdict['spikes'][ii-25])
        plot_big_event_trace(vtrace[ii-spcounter_bias],bigevents,times)
        plot_big_event_summary(eventsize_vec,bigevent_local,bigevent_mean)
    
    return eventsize_vec,bigevent_mean,bigevent_std


def correlationcalc(spikedic,rates,filename,folder,npairs,binw=1*ms,maxlag=40*ms,tlength=None):
    """Compute, plot and return mean pairwise spike cross-covariances.

    Runs corr_routine for the E-E, E-I, I-I, C-C, C-E and C-I population
    pairs, plots all six curves against the lag axis, and returns the
    first three.

    Parameters
    ----------
    spikedic : dict
        Neuron index -> spike-time array, with populations concatenated:
        excitatory first, then cluster, then inhibitory (see the start
        indices computed below).
    rates : mapping with 'Rate_exc' / 'Rate_inh' / 'Rate_clu' traces.
    filename, folder : locate '<folder><filename>-param.db'; the
        sentinel folder 'def' selects '../datasave/'.
    npairs : number of neurons sampled per population.
    binw, maxlag : correlation bin width and maximum lag (Brian units).
    tlength : total record length passed through to CCVF (or None).

    Returns
    -------
    (spcov_mean_ee, spcov_mean_ei, spcov_mean_ii)
    """
    pc={}

    pc['maxlag']=maxlag
    pc['binw']=binw
    pc['tlength']=tlength
    pc['npairs']=npairs

    # Population-mean rates, used by corr_routine for normalization.
    pc['rate_e_m']=mean(rates['Rate_exc'])
    pc['rate_i_m']=mean(rates['Rate_inh'])
    pc['rate_c_m']=mean(rates['Rate_clu'])

    if folder=='def':
        folder='../datasave/'

    # Read the population sizes and close the shelf promptly (the
    # original left the handle open).
    param=shelve.open(folder+filename+'-param.db',protocol=-2)
    try:
        netpar=param['netpar']
    finally:
        param.close()

    pc['nexc']=netpar['Ne']
    pc['ninh']=netpar['Ni']
    pc['nclu']=netpar['Nc']

    # First index of each population in the concatenated numbering.
    pc['nestart']=0
    pc['ncstart']=pc['nexc']
    pc['nistart']=pc['nexc']+pc['nclu']

    spcov_mean_ee=corr_routine(pc,spikedic,'Exc','Exc')
    spcov_mean_ei=corr_routine(pc,spikedic,'Exc','Inh')
    spcov_mean_ii=corr_routine(pc,spikedic,'Inh','Inh')

    spcov_mean_cc=corr_routine(pc,spikedic,'Clu','Clu')
    spcov_mean_ce=corr_routine(pc,spikedic,'Clu','Exc')
    spcov_mean_ci=corr_routine(pc,spikedic,'Clu','Inh')

    lag_vec=linspace(-maxlag,maxlag,len(spcov_mean_ii))

    plot(lag_vec,spcov_mean_ee,'b')
    plot(lag_vec,spcov_mean_ei,'g')
    plot(lag_vec,spcov_mean_ii,'r')

    plot(lag_vec,spcov_mean_cc,'b--')
    plot(lag_vec,spcov_mean_ci,'g--')
    plot(lag_vec,spcov_mean_ce,'b-.')

    legend(('E-E','E-I','I-I','C-C','C-I','C-E'))
    xlabel('Lag (s)')
    xlim([-0.01,0.01])
    show()

    return spcov_mean_ee, spcov_mean_ei, spcov_mean_ii


# ---------------- INTERNAL FUNCTIONS

def big_event_preparebin(eventsize_vec,amplitude,spiketrigger,times):
    """Convert detected event amplitudes into a rate density per size bin.

    For each threshold in ``eventsize_vec`` the cumulative rate of events
    strictly larger than the threshold is computed; differentiating that
    curve gives a density, divided by 1000 for the V -> mV unit change.

    Parameters
    ----------
    eventsize_vec : 1-D uniformly spaced array of amplitude thresholds (V).
    amplitude : array-like of event amplitudes (V). Left unmodified (the
        original sorted the caller's array in place -- a side effect on
        the input).
    spiketrigger : accepted for interface compatibility only; the
        original used it solely for a commented-out filtering step (and
        its 2-tuple nonzero() unpack would crash on 1-D input), so it is
        ignored here.
    times : 1-D sample-time vector; its spacing and length define the
        total observation time.

    Returns
    -------
    1-D array the same length as ``eventsize_vec``: the event-rate
    density per bin (Hz/mV), with a trailing 0 so lengths match.
    """
    dt=times[1]-times[0]
    total_time=np.size(times)*dt

    # Sort a copy so the caller's array is untouched; with a sorted array
    # the "events larger than threshold" count becomes a binary search
    # instead of a linear scan per threshold.
    amp_sorted=np.sort(np.asarray(amplitude))

    # searchsorted(side='right') counts amplitudes <= each threshold, so
    # the remainder are the strictly larger events (matches the original
    # 'amplitude > ii' comparison).
    n_above=amp_sorted.size-np.searchsorted(amp_sorted,eventsize_vec,side='right')
    bigevent_cumrate=n_above.astype(float)/total_time

    # Differentiate the cumulative rate to get a density per amplitude
    # bin; /1000 converts from per-volt to per-millivolt.
    bin_width=eventsize_vec[1]-eventsize_vec[0]
    bigevent_local=-np.diff(bigevent_cumrate)/bin_width/1000
    return np.concatenate((bigevent_local,np.array([0.0])))

   
def plot_big_event_trace(vtrace,bigevent,times):
    """Plot a voltage trace and mark detected events in the 3-5 mV band.

    Each qualifying event is overdrawn as a red straight segment from its
    onset sample to its end sample.
    """
    amp_low=0.003   # lower amplitude bound (V)
    amp_high=0.005  # upper amplitude bound (V)

    plt.plot(times,vtrace)

    for idx,amp in enumerate(bigevent['ampli']):
        if amp_low<amp<amp_high:
            b0=bigevent['binstart'][idx]
            b1=b0+bigevent['binduration'][idx]
            segment=plot([times[b0],times[b1]],[vtrace[b0],vtrace[b1]])
            plt.setp(segment,'color','r')

    show()

def plot_big_event_summary(eventsize_vec,bigevent_local,mean_bev):
    """Plot per-neuron event-rate curves with the population mean in red."""
    plt.figure(figsize=(2,1.5))

    size_mv=eventsize_vec*1000  # x-axis in mV
    plt.plot(size_mv,bigevent_local.T)
    mean_line=plt.plot(size_mv,mean_bev)
    plt.setp(mean_line,'color','r','linewidth',1.0)

    plt.xlabel('Event size (mV)')
    plt.ylabel('Rate (Hz/mV)')
    show()

def plot_comparison_summary(eventsize_vec_r,bigevent_mean_r,bigevent_std_r,eventsize_vec_c,bigevent_mean_c,bigevent_std_c):
    """Compare regular vs cluster event-rate curves (mean +- s.e.m. bands).

    Regular cells are drawn in blue, cluster cells in red; each mean
    curve carries a shaded band of +- the given standard error. The
    figure is also written to 'test.eps' in the working directory.

    Parameters
    ----------
    eventsize_vec_r, eventsize_vec_c : amplitude grids (V) for the two
        populations.
    bigevent_mean_*, bigevent_std_* : matching mean rate densities and
        their standard errors.
    """
    font_size=8
    fig=plt.figure(figsize=(1.5,1.3))
    ax1 = fig.add_subplot(111)

    # Regular population: blue mean line with shaded error band.
    line_ave_r=ax1.plot(eventsize_vec_r*1000,bigevent_mean_r)
    plt.setp(line_ave_r,'color','b','linewidth',1.0)
    ax1.fill_between(
        eventsize_vec_r*1000, bigevent_mean_r-bigevent_std_r, bigevent_mean_r+bigevent_std_r,
        facecolor='blue',edgecolor='none',alpha=0.5
        )

    # Cluster population: red. BUG FIX: the original drew this band
    # against eventsize_vec_r, misaligning it whenever the two grids
    # differ; it also reused the variable name line_std_r.
    line_ave_c=ax1.plot(eventsize_vec_c*1000,bigevent_mean_c)
    plt.setp(line_ave_c,'color','r','linewidth',1.0)
    ax1.fill_between(
        eventsize_vec_c*1000, bigevent_mean_c-bigevent_std_c, bigevent_mean_c+bigevent_std_c,
        facecolor='red',edgecolor='none',alpha=0.5
        )

    # Cosmetics: hide top/right spines, fix ranges and tick positions.
    ax1.spines["right"].set_visible(False)
    ax1.spines["top"].set_visible(False)
    ax1.axis([0., 10., 0., 40.])

    ax1.tick_params(axis='both', direction='out', labelsize=font_size)
    ax1.get_xaxis().tick_bottom()   # remove unneeded ticks
    ax1.get_yaxis().tick_left()

    xticks([0, 5, 10])
    yticks([0, 20, 40])

    plt.xlabel('Event size (mV)',fontsize=font_size)
    plt.ylabel('Rate (Hz/mV)',fontsize=font_size)

    # Leave room for the labels on the small canvas.
    subplots_adjust(bottom=0.35,left=0.3)

    fig.savefig('test.eps',format='eps',transparent=True)

    show()

def plot_trace_spikes(times,vtrace,spiketime):
    """Overlay red spike markers on top of a membrane-voltage trace."""
    sample_dt=times[1]-times[0]
    # Map spike times to sample indices; -1 because bin k holds time
    # (k+1)*dt on the monitor grid.
    spike_idx=np.round(spiketime/sample_dt).astype(int)-1

    plot(times,vtrace)
    plot(times[spike_idx],vtrace[spike_idx],'ro')

def plotPopRates(rates):
    """Plot the excitatory, cluster and inhibitory population-rate traces."""
    sample_dt=np.array(rates['simpar']['Monitor_Clock'])
    npts=len(rates['Rate_exc'])
    t=np.linspace(0,(npts-1)*sample_dt,npts)

    for key in ('Rate_exc','Rate_clu','Rate_inh'):
        plot(t,rates[key])

    show()

def corr_routine(pc,spikedic,pop1,pop2):
    """Mean pairwise spike cross-covariance between two populations.

    Samples up to pc['npairs'] neurons from each population, computes the
    spike-train cross-covariance (Brian's CCVF) for each sampled pair,
    averages over pairs while masking NaN entries, and normalizes by the
    product of the two population-mean rates.

    Parameters
    ----------
    pc : dict built by correlationcalc -- population sizes ('nexc', ...),
        start indices ('nestart', ...), mean rates ('rate_e_m', ...),
        and 'npairs'/'binw'/'maxlag'/'tlength'.
    spikedic : dict, neuron index -> spike-time array.
    pop1, pop2 : one of 'Exc', 'Clu', 'Inh'.

    Returns
    -------
    Masked array: the rate-normalized mean cross-covariance function.
    """
    #-- getting the right parameters according to the populations chosen
    if pop1=='Exc':
        rate_1_m=pc['rate_e_m']
        n1=pc['nexc']
        n1start=pc['nestart']
    elif pop1=='Clu':
        rate_1_m=pc['rate_c_m']
        n1=pc['nclu']
        n1start=pc['ncstart']
    elif pop1=='Inh':
        rate_1_m=pc['rate_i_m']
        n1=pc['ninh']
        n1start=pc['nistart']

    if pop2=='Exc':
        rate_2_m=pc['rate_e_m']
        n2=pc['nexc']
        n2start=pc['nestart']
    elif pop2=='Clu':
        rate_2_m=pc['rate_c_m']
        n2=pc['nclu']
        n2start=pc['ncstart']
    elif pop2=='Inh':
        rate_2_m=pc['rate_i_m']
        n2=pc['ninh']
        n2start=pc['nistart']
        

    start_time = time.time()
    # (ncorr is assigned but never used)
    ncorr=-1
    nii=0
    spcov_12=[]
    
    print pop1+'-'+pop2

    # Never sample more neurons than the population contains.
    npairs1_eff=min(pc['npairs'],n1)
    npairs2_eff=min(pc['npairs'],n2)
   
    for ii in range(n1start,n1start+npairs1_eff):
        nii=nii+1
        print nii
        
        # jj starts at n2start+nii, so each unordered pair is visited
        # once and self-pairs are excluded when pop1 == pop2.
        # NOTE(review): for cross-population pairs this also skips the
        # first nii candidates of pop2 -- confirm that is intended.
        for jj in range(n2start+nii,n2start+npairs2_eff):
            
            spcov_12.append(CCVF(spikedic[ii],spikedic[jj],width=pc['maxlag'],bin=pc['binw'],T=pc['tlength']))
        
    spcov_12=array(spcov_12)
    # Mask NaNs (e.g. from silent neurons) so they don't poison the mean.
    spcov_12=np.ma.masked_array(spcov_12,isnan(spcov_12))

    spcov_mean_12=np.mean(spcov_12,axis=0)
    # Normalize by the product of the two population-mean rates.
    spcov_mean_12=spcov_mean_12/(rate_1_m*rate_2_m)

    # Drop the large per-pair array before returning.
    spcov_12=None

    print "Correlation:", time.time() - start_time, "seconds"

    return spcov_mean_12
