import fast_network_analysis as fna
import single_cell_analysis as ssa

import numpy as np
import pandas as pd

from scipy import *
import scipy.io as io

from brian import *

#import pylab as pyl
import matplotlib.pyplot as plt
import shelve
import pickle
import time
import shutil
from datetime import datetime
import sys

# ---------------- load the file
def name_file():
    """Return a timestamp string 'YYYY-M-D-H-M' built from the current local time."""
    now = time.localtime()
    return '-'.join(str(field) for field in now[:5])

def loadSpikes(filename, folder='def'):
    """Load the spike dictionary saved under <folder><filename>_sp-1.db.

    Parameters
    ----------
    filename : str
        Base name of the saved run.
    folder : str
        Data directory; the literal string 'def' selects '../datasave/'.

    Returns
    -------
    The object stored under the 'spikes' key of the shelf.
    """
    if folder == 'def':
        folder = '../datasave/'

    pathfile = folder + filename

    # Fix: the original rebound the shelf variable to its 'spikes' entry,
    # leaving the shelf open forever.  Close it explicitly after reading.
    db = shelve.open(pathfile + '_sp-1.db', protocol=-2)
    try:
        spikes = db['spikes']
    finally:
        db.close()

    return spikes

def loadParam(filename, folder='def'):
    """Open and return the parameter shelf <folder><filename>-param.db.

    The shelf is returned open; the caller is responsible for closing it.
    Passing the literal string 'def' as folder selects '../datasave/'.
    """
    data_dir = '../datasave/' if folder == 'def' else folder
    return shelve.open(data_dir + filename + '-param.db', protocol=-2)

def loadRates(filename, folder='def'):
    """Open and return the population-rate shelf <folder><filename>_rate-1.db.

    The shelf is returned open; the caller is responsible for closing it.
    Passing the literal string 'def' as folder selects '../datasave/'.
    """
    data_dir = '../datasave/' if folder == 'def' else folder
    return shelve.open(data_dir + filename + '_rate-1.db', protocol=-2)

def loadMonitors(filename, folder):
    """Load voltage and conductance monitor arrays for the three populations.

    Reads nine shelves named <folder><filename>_{v,ge,gi}_{e,c,i}-1.db and
    returns (out_e, out_c, out_i, t_axis) where each out_* dict carries the
    'voltage', 'ge' and 'gi' arrays (neurons x time bins) and t_axis is the
    monitor time axis in seconds.

    Fixes over the original: the nine shelves are now closed after reading,
    and the time-axis variable no longer shadows the imported `time` module.
    """
    pathfile = folder + filename

    def _grab(tag, key):
        # Read one array from its shelf and close the shelf immediately.
        db = shelve.open(pathfile + '_' + tag + '-1.db', protocol=-2)
        try:
            return db[key]
        finally:
            db.close()

    out_e = {'voltage': _grab('v_e', 'voltage'),
             'ge': _grab('ge_e', 'ge'),
             'gi': _grab('gi_e', 'gi')}

    out_c = {'voltage': _grab('v_c', 'voltage'),
             'ge': _grab('ge_c', 'ge'),
             'gi': _grab('gi_c', 'gi')}

    out_i = {'voltage': _grab('v_i', 'voltage'),
             'ge': _grab('ge_i', 'ge'),
             'gi': _grab('gi_i', 'gi')}

    # Monitor sampling period in seconds -- TODO confirm against simpar.
    timestep = 0.0001

    nbins = np.size(out_e['voltage'], axis=1)
    # Renamed from `time` to avoid shadowing the time module imported above.
    t_axis = np.linspace(timestep, timestep * nbins, nbins)

    return out_e, out_c, out_i, t_axis

def loadSpikes_AER(filename, folder):
    """Rebuild a per-neuron spike-time dictionary from per-run AER files.

    Reads the number of runs from <pathfile>-param.db, concatenates the
    (neuron, time) streams returned by load_aer() for each run file
    <pathfile>-<k>.aedat, and buckets the spike times by neuron id.

    Returns
    -------
    dict mapping neuron id -> array of spike times for that neuron.

    Fixes over the original: Python 3 print calls, parameter shelf closed
    after reading, dead commented-out code removed.
    """
    pathfile = folder + filename

    params = shelve.open(pathfile + '-param.db', protocol=-2)
    try:
        nruns = params['simpar']['Nruns']
    finally:
        params.close()

    neulist = np.array([])
    firtimelist = np.array([])

    # NOTE(review): the original iterated arange(1, nruns+1-1), i.e. runs
    # 1 .. nruns-1, skipping the last run file -- preserved here, but it
    # looks like an off-by-one; TODO confirm against the saving code.
    for kk in range(1, nruns):
        print(kk)
        neu_t, firtime_t = load_aer(pathfile + '-' + str(kk) + '.aedat')
        neulist = np.concatenate([neulist, neu_t])
        firtimelist = np.concatenate([firtimelist, firtime_t])

    sp_dict = {}

    start_time = time.time()
    # NOTE(review): 5000 neurons is hard-coded; presumably the network size --
    # TODO read it from the parameter shelf instead.
    for ii in range(0, 5000):
        sp_dict[ii] = firtimelist[neulist == ii]

        if ii % 100 == 0:
            print("100 neurons dict time:", time.time() - start_time, "seconds")

    return sp_dict

def saveMatformat_r(filename, folder, vtrace):
    """Export a voltage trace to MATLAB format as <folder><filename>_vr_mat.mat."""
    import scipy.io as io
    target = folder + filename + '_vr_mat.mat'
    io.savemat(target, mdict={'vtrace': vtrace}, oned_as='column')
 
def saveMatformat_c(filename, folder, vtrace):
    """Export a voltage trace to MATLAB format as <folder><filename>_vc_mat.mat."""
    import scipy.io as io
    target = folder + filename + '_vc_mat.mat'
    io.savemat(target, mdict={'vtrace': vtrace}, oned_as='column')

def saveCorr(filename, filename_analysis, spcov_mean):
    """Persist a correlation-analysis result and append a line to the log.

    Writes spcov_mean under key 'c_anal' into
    '../data_analysed/<filename>-anal-<filename_analysis>-corr.db' and
    records the operation in '../logsave/anal_log.txt'.

    Fixes over the original: removed the leftover Python 2 `print f` debug
    statement and handle the log file with a context manager.
    """
    folder = '../data_analysed/'

    pathfile = folder + filename + '-anal-' + filename_analysis

    s = shelve.open(pathfile + '-corr.db', writeback=True, protocol=-2)
    s['c_anal'] = spcov_mean
    s.close()

    # Log line kept byte-identical to the original format.
    st = str(filename)+'-anal-'+ str(filename_analysis) + '\t' + 'corr_analysis on' + str(spcov_mean['param_ana']['npairs']) + 'pairs\n' 
    with open('../logsave/anal_log.txt', 'a') as f:
        f.write(st)

def saveLargeEvents(filename, filename_analysis, large_event, description):
    """Persist a large-event analysis result and append a line to the log.

    Writes large_event under key 'largeevent' into
    '../data_analysed/<filename>-anal-largeevent-<filename_analysis><description>.db'
    and records the operation in '../logsave/anal_log.txt'.

    Fixes over the original: removed the leftover Python 2 `print f` debug
    statement and handle the log file with a context manager.
    """
    folder = '../data_analysed/'

    pathfile = folder + filename + '-anal-largeevent-' + filename_analysis + description

    s = shelve.open(pathfile + '.db', writeback=True, protocol=-2)
    s['largeevent'] = large_event
    s.close()

    # Log line kept byte-identical to the original format.
    st = str(filename)+'-anal-largeevent-'+ str(filename_analysis)+ description + '\n' 
    with open('../logsave/anal_log.txt', 'a') as f:
        f.write(st)


def recover_voltage(population, filename, folder='def'):
    """Reconstruct membrane voltages from recorded conductances (forward Euler).

    Parameters
    ----------
    population : str
        One of 'exc', 'clu', 'inh' -- selects which conductance shelves to read.
    filename : str
        Base name of the saved run.
    folder : str
        Data directory; the literal string 'def' selects '../datasave/'.

    Returns
    -------
    vm / 1000 : array (neurons x time bins) of reconstructed voltages.
        NOTE(review): the /1000 suggests the integration runs in mV-scaled
        units -- TODO confirm against the saving code.

    Fixes over the original: Python 3 print call, explicit error on an
    unknown population (previously a confusing UnboundLocalError), shelves
    closed after reading, while-loop replaced by a range loop.
    """
    if folder == 'def':
        folder = '../datasave/'

    pathfile = folder + filename
    start_time = time.time()

    suffixes = {'exc': 'e', 'clu': 'c', 'inh': 'i'}
    if population not in suffixes:
        raise ValueError("population must be 'exc', 'clu' or 'inh', got %r" % (population,))
    tag = suffixes[population]

    ge_db = shelve.open(pathfile + '_ge_' + tag + '-1.db', protocol=-2)
    gi_db = shelve.open(pathfile + '_gi_' + tag + '-1.db', protocol=-2)
    ge = ge_db['ge'].transpose()
    ge_db.close()
    gi = gi_db['gi'].transpose()
    gi_db.close()

    param = loadParam(filename)
    dt = np.array(param['simpar']['Monitor_Clock'])
    C = param['paspar']['C']
    Gl = param['paspar']['gl']
    El = param['paspar']['El']
    Ee = param['paspar']['Ee']
    Ei = param['paspar']['Ei']

    vm = np.zeros(np.shape(ge))
    vm[:, 0] = -60  # initial condition; units as stored -- TODO confirm

    # Forward-Euler integration: vm[:, ii+1] from vm[:, ii].
    # NOTE(review): Gl multiplies the synaptic terms as well as the leak,
    # which implies ge/gi are stored normalized by the leak conductance --
    # preserved exactly as in the original; TODO confirm.
    for ii in range(np.size(vm, axis=1) - 1):
        dv = dt/C * Gl*((El-vm[:, ii]) + ge[:, ii]*(Ee-vm[:, ii]) + gi[:, ii]*(Ei-vm[:, ii]))
        vm[:, ii+1] = vm[:, ii] + dv

    print("Total time:", time.time() - start_time, "seconds")
    return vm/1000


def large_event_network(filename, vtrace, spdict, eventsize, celltype, spike_cut='yes', verbose='none'):
    """Compute the population-averaged large-event rate density per event size.

    Parameters
    ----------
    filename : str
        Run name used to load network parameters.
    vtrace : array (neurons x time bins)
        Voltage traces for the chosen population.
    spdict : dict
        Neuron id -> spike times; NOTE: entries are cleared in place when
        spike_cut != 'yes'.
    eventsize : float
        Minimum event size; also the start of the size histogram.
    celltype : str
        'exc', 'clu' or 'inh' -- sets the neuron-id offset into spdict.
    spike_cut : str
        'yes' to exclude spike-triggered events, anything else to keep all.
    verbose : str
        'plot' to show diagnostic figures, anything else for no output.

    Returns
    -------
    dict with 'size' (bin edges), 'mean' and 'std' (SEM) rate densities.

    Fixes over the original: Python 3 print call; an unknown celltype now
    raises ValueError instead of a later NameError.
    """
    start_time = time.time()

    param = loadParam(filename)

    if celltype == 'exc':
        spcounter_bias = 0
    elif celltype == 'clu':
        spcounter_bias = param['netpar']['Ne']
    elif celltype == 'inh':
        spcounter_bias = param['netpar']['Ne'] + param['netpar']['Nc']
    else:
        raise ValueError("celltype must be 'exc', 'clu' or 'inh', got %r" % (celltype,))

    dt = 0.0001  # trace sampling step in seconds -- TODO confirm against simpar

    ntraces = np.size(vtrace, axis=0)
    spcounter = np.arange(0, ntraces) + spcounter_bias

    nbins = np.size(vtrace, axis=1)
    times = np.linspace(dt, dt*nbins, nbins)

    eventsize_vec = np.arange(eventsize, 0.015, 0.0005)

    bigevent_local = []
    for ii in spcounter:

        if spike_cut != 'yes':
            spdict[ii] = []  # disables spike exclusion; mutates caller's dict

        bigevents = ssa.search_big_events(vtrace[ii-spcounter_bias], times, spdict[ii], eventsize, 0.005)

        bigevent_local_t = big_event_preparebin(eventsize_vec, bigevents['ampli'], bigevents['spiketrigger'], times)

        bigevent_local.append(bigevent_local_t)

    bigevent_local = np.array(bigevent_local)
    bigevent_mean = bigevent_local.mean(axis=0)
    # Standard error of the mean across neurons.
    bigevent_std = bigevent_local.std(axis=0) / np.sqrt(np.size(bigevent_local, axis=0))

    print("Total time:", time.time() - start_time, "seconds")

    if verbose == 'plot':
        # NOTE(review): `ii` is the last loop index here and the -2 offsets
        # look ad hoc (plots an arbitrary neuron) -- preserved as-is.
        plot_trace_spikes(times, vtrace[ii-spcounter_bias-2], spdict[ii-2])
        plot_big_event_trace(vtrace[ii-spcounter_bias], bigevents, times)
        plot_big_event_summary(eventsize_vec, bigevent_local, bigevent_mean)

    large_event = {}
    large_event['size'] = eventsize_vec
    large_event['mean'] = bigevent_mean
    large_event['std'] = bigevent_std

    return large_event


def correlationcalc(spikedic, rates, filename, folder, npairs, binw=1*ms, maxlag=40*ms, tlength=None):
    """Compute pairwise spike cross-covariances for all population pairs.

    Parameters
    ----------
    spikedic : dict
        Neuron id -> spike-time array, indexed across the whole network.
    rates : dict
        Must carry 'Rate_exc', 'Rate_inh', 'Rate_clu' population-rate traces.
    filename, folder : str
        Locate the parameter shelf; folder 'def' selects '../datasave/'.
    npairs : int
        Maximum neurons sampled per population.
    binw, maxlag : Brian quantities
        CCVF bin width and maximum lag.
    tlength : optional
        Total spike-train length forwarded to CCVF.

    Returns
    -------
    dict with one averaged cross-covariance per pair ('ee', 'ei', ...),
    the lag axis under 'lag' and the analysis parameters under 'param_ana'.

    Fix over the original: the parameter shelf is closed after reading.
    """
    pc = {}

    pc['maxlag'] = maxlag
    pc['binw'] = binw
    pc['tlength'] = tlength
    pc['npairs'] = npairs

    pc['rate_e_m'] = np.mean(rates['Rate_exc'])
    pc['rate_i_m'] = np.mean(rates['Rate_inh'])
    pc['rate_c_m'] = np.mean(rates['Rate_clu'])

    if folder == 'def':
        folder = '../datasave/'

    pathfile = folder + filename
    param = shelve.open(pathfile + '-param.db', protocol=-2)
    try:
        pc['nexc'] = param['netpar']['Ne']
        pc['ninh'] = param['netpar']['Ni']
        pc['nclu'] = param['netpar']['Nc']
    finally:
        param.close()  # fix: shelf was previously left open

    # Neuron-id offsets: ids run exc, then clustered, then inhibitory.
    pc['nestart'] = 0
    pc['ncstart'] = pc['nexc']
    pc['nistart'] = pc['nexc'] + pc['nclu']

    spcov_mean = {}

    spcov_mean['ee'] = corr_routine(pc, spikedic, 'Exc', 'Exc')
    spcov_mean['ei'] = corr_routine(pc, spikedic, 'Exc', 'Inh')
    spcov_mean['ii'] = corr_routine(pc, spikedic, 'Inh', 'Inh')

    spcov_mean['cc'] = corr_routine(pc, spikedic, 'Clu', 'Clu')
    spcov_mean['ce'] = corr_routine(pc, spikedic, 'Clu', 'Exc')
    spcov_mean['ci'] = corr_routine(pc, spikedic, 'Clu', 'Inh')

    lag_vec = np.linspace(-maxlag, maxlag, len(spcov_mean['ee']))
    spcov_mean['lag'] = lag_vec
    spcov_mean['param_ana'] = pc

    return spcov_mean


def plot_correlations(spcov_mean):
    """Plot the six averaged cross-covariances against lag and show the figure."""
    lag = spcov_mean['lag']
    # Order matters: the legend labels follow the plotting order.
    for key, fmt in (('ee', 'b'), ('ei', 'g'), ('ii', 'r'),
                     ('cc', 'b--'), ('ci', 'g--'), ('ce', 'b-.')):
        plot(lag, spcov_mean[key], fmt)

    legend(('E-E', 'E-I', 'I-I', 'C-C', 'C-I', 'C-E'))
    xlabel('Lag (s)')
    xlim([-0.01, 0.01])
    show()


# ---------------- INTERNAL FUNCTIONS

def big_event_preparebin(eventsize_vec, amplitude, spiketrigger, times):
    """Bin event amplitudes into a rate density per event size.

    Parameters
    ----------
    eventsize_vec : 1-D array
        Uniformly spaced amplitude thresholds.
    amplitude : array
        Event amplitudes (sorted in place).
    spiketrigger : 2-D array
        Rows with any nonzero entry mark spike-triggered events to discard.
        NOTE(review): the two-value unpack of .nonzero() requires a 2-D
        array -- TODO confirm against ssa.search_big_events.
    times : 1-D array
        Uniformly spaced time axis of the underlying trace.

    Returns
    -------
    1-D array, same length as eventsize_vec: -d(cumulative rate)/d(size),
    divided by 1000 (presumably a V -> mV unit conversion -- TODO confirm),
    padded with a trailing 0.

    Fixes over the original: dead commented-out code removed; the linear
    np.where scan per threshold is replaced by np.searchsorted on the
    sorted amplitudes (equivalent result, O(log n) per threshold).
    """
    dt = times[1] - times[0]

    # Discard spike-triggered events: row indices of nonzero entries.
    sp_rows, _ = np.asarray(spiketrigger).nonzero()
    amplitude = np.delete(amplitude, sp_rows)
    amplitude.sort()

    total_t = np.size(times) * dt  # total recording time in seconds

    bigevent_cumrate = np.zeros((eventsize_vec.size,))
    for a, thresh in enumerate(eventsize_vec):
        # First index with amplitude strictly greater than thresh.
        ind = np.searchsorted(amplitude, thresh, side='right')
        ncount = float(np.size(amplitude) - ind)
        bigevent_cumrate[a] = ncount / total_t

    step = eventsize_vec[1] - eventsize_vec[0]
    bigevent_local = -np.diff(bigevent_cumrate, n=1, axis=0) / step / 1000
    bigevent_local = np.concatenate((bigevent_local, np.array([0.0])))

    return bigevent_local

   
def plot_big_event_trace(vtrace, bigevent, times):
    """Plot a voltage trace and overlay, in red, the events whose amplitude
    falls in the (0.003, 0.005) band."""
    lower = 0.003
    upper = 0.005

    plt.plot(times, vtrace)

    for kk, amp in enumerate(bigevent['ampli']):
        if lower < amp < upper:
            b0 = bigevent['binstart'][kk]
            b1 = b0 + bigevent['binduration'][kk]
            segment = plot([times[b0], times[b1]], [vtrace[b0], vtrace[b1]])
            plt.setp(segment, 'color', 'r')

    show()

def plot_big_event_summary(eventsize_vec, bigevent_local, mean_bev):
    """Plot per-neuron event-rate curves with the population mean in red.

    Event sizes are converted from V to mV on the x axis.
    """
    plt.figure(figsize=(2, 1.5))

    sizes_mv = eventsize_vec * 1000
    plt.plot(sizes_mv, bigevent_local.T)
    mean_line = plt.plot(sizes_mv, mean_bev)
    plt.setp(mean_line, 'color', 'r', 'linewidth', 1.0)

    plt.xlabel('Event size (mV)')
    plt.ylabel('Rate (Hz/mV)')
    show()

def plot_comparison_summary(large_event_1, large_event_2):
    """Overlay the mean +/- SEM large-event rate curves of two conditions.

    large_event_1 is drawn in blue, large_event_2 in red; each argument is a
    dict with 'size', 'mean' and 'std' as produced by large_event_network().
    The figure is saved to 'test.eps' and shown.

    Fix over the original: the second fill_between used the condition-1
    size vector as x axis while plotting condition-2 data (copy-paste bug).
    """
    eventsize_vec_r = large_event_1['size']
    bigevent_mean_r = large_event_1['mean']
    bigevent_std_r = large_event_1['std']

    eventsize_vec_c = large_event_2['size']
    bigevent_mean_c = large_event_2['mean']
    bigevent_std_c = large_event_2['std']

    font_size = 8
    fig = plt.figure(figsize=(1.5, 1.3))
    ax1 = fig.add_subplot(111)

    # Condition 1 (blue): mean line plus translucent SEM band.
    line_ave_r = ax1.plot(eventsize_vec_r*1000, bigevent_mean_r)
    plt.setp(line_ave_r, 'color', 'b', 'linewidth', 1.0)
    ax1.fill_between(
        eventsize_vec_r*1000, bigevent_mean_r-bigevent_std_r, bigevent_mean_r+bigevent_std_r,
        facecolor='blue', edgecolor='none', alpha=0.5
        )

    # Condition 2 (red): mean line plus translucent SEM band.
    line_ave_c = ax1.plot(eventsize_vec_c*1000, bigevent_mean_c)
    plt.setp(line_ave_c, 'color', 'r', 'linewidth', 1.0)
    # BUG FIX: was eventsize_vec_r here.
    ax1.fill_between(
        eventsize_vec_c*1000, bigevent_mean_c-bigevent_std_c, bigevent_mean_c+bigevent_std_c,
        facecolor='red', edgecolor='none', alpha=0.5
        )

    # Axis cosmetics: hide top/right spines, fix ranges and ticks.
    ax1.spines["right"].set_visible(False)
    ax1.spines["top"].set_visible(False)
    ax1.axis([0., 10., 0., 40.])

    ax1.tick_params(axis='both', direction='out', labelsize=font_size)
    ax1.get_xaxis().tick_bottom()   # remove unneeded ticks
    ax1.get_yaxis().tick_left()

    # xticks/yticks/subplots_adjust/show come from the pylab star import.
    xticks([0, 5, 10])
    yticks([0, 20, 40])

    plt.xlabel('Event size (mV)', fontsize=font_size)
    plt.ylabel('Rate (Hz/mV)', fontsize=font_size)

    # Keep the axes inside the small canvas.
    subplots_adjust(bottom=0.35, left=0.3)

    fig.savefig('test.eps', format='eps', transparent=True)

    show()

def plot_trace_spikes(times, vtrace, spiketime):
    """Plot a voltage trace and mark the spike times with red dots."""
    step = times[1] - times[0]

    # Map spike times onto trace bins (original used spikebin-1 indexing).
    idx = np.round(spiketime / step).astype(int) - 1

    plot(times, vtrace)
    plot(times[idx], vtrace[idx], 'ro')

    show()

def plotPopRates(rates):
    """Plot the excitatory, clustered and inhibitory population rates over time."""
    dt = np.array(rates['simpar']['Monitor_Clock'])

    n = len(rates['Rate_exc'])
    t = np.linspace(0, (n - 1) * dt, n)

    for key in ('Rate_exc', 'Rate_clu', 'Rate_inh'):
        plot(t, rates[key])

    show()

def corr_routine(pc, spikedic, pop1, pop2):
    """Average the spike cross-covariance (CCVF) over sampled neuron pairs.

    Parameters
    ----------
    pc : dict
        Analysis parameters built by correlationcalc (rates, population
        sizes, start indices, npairs, CCVF settings).
    spikedic : dict
        Neuron id -> spike-time array.
    pop1, pop2 : str
        'Exc', 'Clu' or 'Inh'.

    Returns
    -------
    Cross-covariance averaged over valid pairs, normalized by the product
    of the two population mean rates.

    Fixes over the original: Python 3 print calls, the two duplicated
    if-chains replaced by one lookup table, unused `ncorr` removed; an
    unknown population name now raises ValueError instead of a later
    NameError.
    """
    # Population -> (mean rate, size, start index in spikedic).
    popinfo = {
        'Exc': (pc['rate_e_m'], pc['nexc'], pc['nestart']),
        'Clu': (pc['rate_c_m'], pc['nclu'], pc['ncstart']),
        'Inh': (pc['rate_i_m'], pc['ninh'], pc['nistart']),
    }
    if pop1 not in popinfo or pop2 not in popinfo:
        raise ValueError("populations must be 'Exc', 'Clu' or 'Inh'")
    rate_1_m, n1, n1start = popinfo[pop1]
    rate_2_m, n2, n2start = popinfo[pop2]

    start_time = time.time()
    nii = 0

    print(pop1 + '-' + pop2)

    npairs1_eff = min(pc['npairs'], n1)
    npairs2_eff = min(pc['npairs'], n2)

    # Running mean accumulator, shaped like one CCVF output.
    spcov_mean_12_p = np.zeros(np.shape(CCVF(spikedic[0], spikedic[0], width=pc['maxlag'], bin=pc['binw'], T=pc['tlength'])))

    toti = 0
    for ii in range(n1start, n1start + npairs1_eff):
        nii = nii + 1
        print(nii)

        # jj starts at n2start+nii so each unordered pair is visited once
        # and self-pairs are skipped.
        for jj in range(n2start + nii, n2start + npairs2_eff):
            toti = toti + 1
            x = CCVF(spikedic[ii], spikedic[jj], width=pc['maxlag'], bin=pc['binw'], T=pc['tlength'])
            # NOTE(review): toti is incremented even when x contains NaN, so
            # NaN pairs dilute the running mean -- preserved as-is; confirm
            # whether that is intended.
            if not np.isnan(np.min(x)):
                spcov_mean_12_p = (spcov_mean_12_p*(toti-1) + x)/toti

    # Normalize by the product of the population mean rates.
    spcov_mean_12_p = spcov_mean_12_p/(rate_1_m*rate_2_m)

    print("Correlation:", time.time() - start_time, "seconds")

    return spcov_mean_12_p
