#!/usr/bin/env python
import numpy as np
from BinnedAnalysis import binnedAnalysis
from SummedLikelihood import SummedLikelihood
from UpperLimits import UpperLimits as UL

def get_results(sumlike, outfile='results_sumLike.dat'):
    """Summarize the fit results of a SummedLikelihood into *outfile*.

    Writes a pretty-printed dict containing the total log-likelihood,
    optimizer diagnostics (when the optimizer exposes them) and, for every
    source with a free spectrum, its TS, integral flux, Npred and spectral
    parameters.  Prints a warning for any fitted parameter sitting within
    10% (relative) of one of its bounds.

    Parameters
    ----------
    sumlike : SummedLikelihood
        A fitted summed-likelihood object.
    outfile : str
        Path of the text file the summary is written to.

    Returns
    -------
    float
        Total predicted counts (Npred) summed over all sources.

    Adapted from code by Y-F. Liang.
    """
    import pprint

    # Energy range is taken from the first component; all components are
    # assumed to share the same energy binning -- TODO confirm upstream.
    energies = sumlike.components[0].energies
    emin, emax = energies.min(), energies.max()

    dicttot = {"logLikelihood": sumlike.logLike.value()}
    # Optimizer diagnostics are optional (depend on the optimizer used),
    # hence the AttributeError guard.  The 'z' prefix sorts them last in
    # the pprint output.
    for key, getter in (('zEDM', 'getDistance'),
                        ('zReturnCode', 'getRetCode'),
                        ('zFitQuality', 'getQuality')):
        try:
            dicttot[key] = getattr(sumlike.optObject, getter)()
        except AttributeError:
            pass

    counts = 0
    for source in sumlike.sourceNames():
        # Fixed sources only contribute to the total predicted counts.
        if sumlike[source].src.fixedSpectrum():
            counts += sumlike.NpredValue(source)
            continue

        dict_ = {}
        npred = sumlike.NpredValue(source)
        ts = sumlike.Ts(source)
        flux_ = sumlike.flux(source, emin=emin, emax=emax, energyFlux=False)
        fluxErr_ = sumlike.fluxError(source, emin=emin, emax=emax, energyFlux=False)
        counts += npred
        # Single-argument print(...) works identically in Python 2 and 3.
        print('[%s] TS = %s ' % (source, ts))

        dict_['TS value'] = str(ts)
        dict_['Flux'] = "%.5e +/- %.5e" % (flux_, fluxErr_)
        dict_['NPred'] = str(npred)

        # Hoist the repeated sumlike[source].funcs['Spectrum'] lookup.
        spectrum = sumlike[source].funcs['Spectrum']
        for pname in spectrum.paramNames:
            error_ = spectrum.params[pname].error()
            value_ = spectrum.params[pname].value()
            dict_[pname] = "%f +/- %f" % (value_, error_)

            bounds_ = spectrum.getParam(pname).getBounds()
            # Relative distance to each bound; fall back to the absolute
            # parameter value when the bound itself is zero.
            try:
                distToLower = abs((value_ - bounds_[0])/bounds_[0])
            except ZeroDivisionError:
                distToLower = abs(value_)
            try:
                distToUpper = abs((value_ - bounds_[1])/bounds_[1])
            except ZeroDivisionError:
                distToUpper = abs(value_)

            if distToLower < 0.1:
                print ('>>> %s[%s] close to lower limit[%s]' %
                       (pname, value_, bounds_[0]))
            if distToUpper < 0.1:
                print ('>>> %s[%s] close to upper limit[%s]' %
                       (pname, value_, bounds_[1]))
        dicttot[source] = dict_

    # 'with' guarantees the file is closed even if pprint raises.
    with open(outfile, 'w') as f:
        pprint.pprint(dicttot, stream=f)
    return counts

def get_covar(sumlike, outfile='covariance_sumLike.dat'):
    """Dump covariance/correlation matrices of the last fit to *outfile*.

    Writes the list of free parameters, the covariance matrix obtained
    from the last `fit(covar=True)` call, the derived correlation matrix
    and any strongly correlated parameter pairs.  Only warns (writes no
    file) when no up-to-date covariance is available.

    Parameters
    ----------
    sumlike : SummedLikelihood
        A fitted summed-likelihood object with `covar_is_current` set.
    outfile : str
        Path of the text file the matrices are written to.
    """
    # Guard clause instead of wrapping the whole body in an 'if'.
    if not sumlike.covar_is_current:
        print('[WARN] No covariance is available!')
        return

    # fh.write(...) replaces the Python-2-only 'print >>fh' while emitting
    # byte-identical lines; 'with' makes the handle exception-safe.
    with open(outfile, 'w') as fh:
        fh.write('-'*20 + ' free parameters ' + '-'*20 + '\n')
        prm_lst = []
        for prm in sumlike.params():
            if prm.parameter.isFree():
                freePrm = prm.srcName + '_' + prm.parameter.getName()
                fh.write('%s\t%s\n' % (len(prm_lst) + 1, freePrm))
                prm_lst.append(freePrm)

        fh.write('-'*20 + ' covariance matrix ' + '-'*20 + '\n')
        covar = np.array(sumlike.covariance)
        np.savetxt(fh, covar, delimiter='  ', fmt='% .3e')

        fh.write('-'*20 + ' correlation matrix ' + '-'*20 + '\n')
        cov_diag = covar.diagonal()
        if (cov_diag < 0.).any():
            # Negative variances signal a broken fit; skip the rest.
            fh.write('[ERROR] covar.diagonal < 0\n')
            return

        sigma = np.sqrt(cov_diag)
        # Divide by the outer product sigma_i * sigma_j.
        corr = covar / (sigma * sigma.reshape(-1, 1))
        np.savetxt(fh, corr, delimiter='  ', fmt='% .3f')

        fh.write('='*20 + ' correlation check ' + '='*20 + '\n')
        # Upper triangle (diagonal excluded) so each pair is listed once.
        triu = np.abs(np.triu(corr, 1))

        fh.write('.'*9 + ' Very strong correlation (|corr| >= 0.8) ' + '.'*9 + '\n')
        for i, j in zip(*(triu >= 0.8).nonzero()):
            fh.write('%s <-> %s : %s\n' % (prm_lst[i], prm_lst[j], corr[i, j]))

        fh.write('.'*9 + ' Strong correlation (0.6 <= |corr| < 0.8) ' + '.'*8 + '\n')
        for i, j in zip(*((triu < 0.8) & (triu >= 0.6)).nonzero()):
            fh.write('%s <-> %s : %s\n' % (prm_lst[i], prm_lst[j], corr[i, j]))

def mySumLike(srcMaps, expCube, binnedExpMap, srcModel, irfs, formatter,
              saveFolder='./',
              tol1=0.05, tol2=1.e-5,
              ncomp=4):
    """Run a summed binned-likelihood fit over up to *ncomp* data sets.

    For each index i in range(ncomp) the folder `formatter % i` is probed;
    every existing folder contributes one BinnedAnalysis component to a
    SummedLikelihood.  Two MINUIT fits are then run (a loose first pass
    followed by a tight pass with covariance) and the models plus the
    result/covariance summaries are written into *saveFolder*.

    Parameters
    ----------
    srcMaps, binnedExpMap : str
        File names, resolved relative to each component folder.
    expCube, srcModel, irfs : str
        Live-time cube, XML model file and IRF name shared by all components.
    formatter : str
        printf-style pattern producing one folder name per component index.
    saveFolder : str
        Output directory for the fitted models and summary files.
    tol1, tol2 : float
        Tolerances of the first and second fit.
    ncomp : int
        Number of component indices to probe (default 4, matching the
        previous hard-coded behaviour).

    Raises
    ------
    IOError
        If *formatter* is empty (kept as IOError for backward compatibility).
    RuntimeError
        If no component folder exists.
    """
    import os

    summed_like = SummedLikelihood()
    if not formatter:
        raise IOError('Formatter is empty!')

    # range() instead of Python-2-only xrange(); ncomp generalizes the
    # previously hard-coded 4 components.
    for i in range(ncomp):
        folder = formatter % i
        if not folder:
            continue
        elif not folder.endswith('/'):
            folder += '/'

        if os.path.exists(folder):
            print('Add models in %s into sumLike!' % folder)
            like = binnedAnalysis(irfs=irfs, expcube=expCube, srcmdl=srcModel,
                                  optimizer='MINUIT',
                                  cmap=folder + srcMaps,
                                  bexpmap=folder + binnedExpMap)
            summed_like.addComponent(like)

    if not summed_like.components:
        raise RuntimeError('No model is added!')

    if not saveFolder:
        saveFolder = './'
    elif not saveFolder.endswith('/'):
        saveFolder += '/'

    # Pass 1: loose tolerance to get close to the minimum cheaply.
    summed_like.fit(optimizer='MINUIT', tol=tol1)
    summed_like.writeXml(saveFolder + 'model_sumLike_pass1.xml')
    print('Save pass1 Model to ' + saveFolder + 'model_sumLike_pass1.xml')

    # Pass 2: tight tolerance with covariance for the error estimates.
    summed_like.fit(covar=True, optimizer='MINUIT', tol=tol2)
    summed_like.writeXml(saveFolder + 'model_sumLike_pass2.xml')
    print('Save pass2 Model to ' + saveFolder + 'model_sumLike_pass2.xml')

    # logLike.value() is the log-likelihood, not a TS -- label fixed.
    print('[INFO] Total logLike: ' + str(summed_like.logLike.value()))
    print('[INFO] Return Code: ' + str(summed_like.optObject.getRetCode()))
    get_results(summed_like, outfile=saveFolder + 'results_sumLike_MINUIT.dat')
    get_covar(summed_like, outfile=saveFolder + 'covariance_sumLike.dat')
    # summed_like.writeCountsSpectra(...) -- not implemented upstream.

def cli():
    """Command-line entry point: parse the arguments and run mySumLike()."""
    import argparse

    parser = argparse.ArgumentParser(
        description='This is a binned likelihood analysis tools based on Fermi pyLikelihood package')

    # Positional arguments, in call order: (name, help text).
    positionals = (
        ('srcMaps', 'Source Maps'),
        ('expCube', 'Live Time Cube'),
        ('binnedExpMap', 'Exposure Cube'),
        ('srcModel', 'model.xml'),
        ('irfs', 'IRFs'),
        ('fmt', 'The formatter of the model folders.'),
    )
    for arg_name, arg_help in positionals:
        parser.add_argument(arg_name, type=str, help=arg_help)

    # Optional tuning knobs (single-dash names kept for compatibility).
    parser.add_argument('-saveFolder', type=str, default='./',
                        help='Where to save output files')
    parser.add_argument('-tol1', type=float, default=0.05,
                        help='The tolerance of the first fit')
    parser.add_argument('-tol2', type=float, default=1.e-5,
                        help='The tolerance of the second fit')

    opts = parser.parse_args()
    mySumLike(opts.srcMaps, opts.expCube, opts.binnedExpMap, opts.srcModel,
              opts.irfs, opts.fmt, opts.saveFolder,
              tol1=opts.tol1, tol2=opts.tol2)

if __name__ == '__main__':
    cli()
