from __future__ import print_function
import os
import copy
import numpy as np

try:
    from astropy import units as u
    from astropy.coordinates import SkyCoord
    from astropy.io import fits as pf
    from astropy.table import Table
except ImportError as err:
    print('[ImportError] %s!' % err)

from . import Models2
from .SMKernel import SourceModel

def GAL2CEL(l_deg, b_deg):
    """
    Convert positions in galactic coordinates to Celestrial coordinates. Return ra, dec in [deg].
    """
    gal = SkyCoord(l=l_deg*u.degree, b=b_deg*u.degree, frame='galactic')
    # Transform once to FK5 and read both angles from the same object.
    cel = gal.fk5
    return cel.ra.deg, cel.dec.deg

def CEL2GAL(ra_deg, dec_deg):
    """
    Convert positions in Celestrial coordinates to galactic coordinates. Return l, b in [deg].
    """
    cel = SkyCoord(ra=ra_deg*u.degree, dec=dec_deg*u.degree, frame='fk5')
    # Transform once to the galactic frame and read both angles from it.
    gal = cel.galactic
    return gal.l.deg, gal.b.deg

class XmlPtSrc(object):
    """
    Deprecating, use MySrcObj instead!

    Parse an XML source model and keep the name, RA/Dec (and derived l/b)
    of every PointSource entry.
    """

    def __init__(self, xmlModel='model_input.xml', verbose=True):
        """
        Parse xmlModel and collect all PointSource entries.

        Parameters
        ----------
        xmlModel : str
            Path to the XML model file.
        verbose : bool
            Print progress messages when True.
        """
        self.xmlModel = xmlModel
        self.verbose = verbose

        print('Pending deprication!')

        if verbose:
            print('Parsing xmlFile %s ...' % xmlModel.split('/')[-1])
        srcModel = SourceModel(self.xmlModel, raiseErr=True)

        self.src_names, self.src_ra, self.src_dec = [], [], []
        for src in srcModel.srcList.values():
            if src.type == 'PointSource':
                self.src_names.append(src.name)
                self.src_ra.append(src.spatialModel.RA.value)
                self.src_dec.append(src.spatialModel.DEC.value)

        self.nsources = len(self.src_names)
        self.src_ra = np.atleast_1d(self.src_ra)
        self.src_dec = np.atleast_1d(self.src_dec)

        self.src_l, self.src_b = CEL2GAL(self.src_ra, self.src_dec)
        if self.verbose:
            print('%s point sources are loaded.' % self.nsources)

    def __repr__(self):
        return 'xmlFile: %s\n nSrcs: %i' % (self.xmlModel, self.nsources)

    def loadAttr(self, xmlFile, attr='TS_value'):
        """
        Load attribute from another file!

        Stores a {source_name: value} dict under self.<attr> for every
        known point source that carries the attribute in xmlFile.
        """
        print('Parsing xmlFile %s ...' % xmlFile.split('/')[-1])
        srcModel = SourceModel(xmlFile, raiseErr=True)
        self.__dict__[attr] = {}

        print('Loading attribute %s from xmlFile ...' % attr)
        allSrcs = srcModel.names
        for src in self.src_names:
            if src in allSrcs and hasattr(srcModel[src], attr):
                attr_value = getattr(srcModel[src], attr)
                # Store as float when possible, otherwise keep the raw value.
                try:
                    self.__dict__[attr][src] = float(attr_value)
                except ValueError:
                    self.__dict__[attr][src] = attr_value

    def getAttrSortedSrc(self, attr='TS_value'):
        """
        Return sorted srcname from max to min according to attr.
        """
        if not hasattr(self, attr):
            # BUG FIX: '%' was previously applied to the raised exception
            # ("raise IOError(...) % attr"), producing a TypeError instead
            # of the intended IOError message.
            raise IOError('No attribute %s is contained in self!' % attr)

        attr_dict = self.__dict__[attr]
        # list(...) is required on Python 3, where dict views cannot be
        # converted to typed numpy arrays directly.  keys() and values()
        # of the same dict iterate in a consistent order.
        keys = np.array(list(attr_dict.keys()), dtype=str)
        values = np.array(list(attr_dict.values()), dtype=float)
        argsort = values.argsort()
        sortsrc = keys[argsort]
        return sortsrc[::-1]


class MySrcObj(object):
    """
    Container for the sources of a model, split into point sources
    (``self.pts``) and diffuse sources (``self.dfs``), each stored as an
    astropy Table with name, position, significance, flux and variability
    columns.  Missing numeric quantities are encoded as np.inf (positions
    of diffuse sources without RA/DEC as np.nan).
    """

    def __init__(self, infile='model_input.xml', verbose=True):
        """
        infile can be the fermi fits catalog!

        A *.fits/*.fit infile is read as a Fermi LAT catalog
        ('LAT_Point_Source_Catalog' extension); anything else is parsed as
        an XML source model via SourceModel.
        """
        self.verbose = verbose

        if infile.endswith('.fits') or infile.endswith('.fit'):
            if verbose:
                print('Parsing fitsFile %s ...' % infile.split('/')[-1])

            hdulist = pf.open(infile, memmap=True)

            src_data = hdulist['LAT_Point_Source_Catalog'].data
            src_name = np.char.strip(src_data.field('Source_Name'))
            # .copy() detaches each column from the memmap'd record array
            # so the np.inf substitutions below do not touch the file data.
            src_ra = src_data.field('RAJ2000').copy()
            src_dec = src_data.field('DEJ2000').copy()
            src_sig = src_data.field('Signif_Avg').copy()
            src_flx = src_data.field('Flux1000').copy()
            src_var = src_data.field('Variability_Index').copy()
            src_ext = np.char.strip(src_data.field('Extended_Source_Name'))

            # Replace NaN/inf entries with np.inf sentinels.
            sig_invalid = ~np.isfinite(src_sig)
            src_sig[sig_invalid] = np.inf

            flx_invalid = ~np.isfinite(src_flx)
            src_flx[flx_invalid] = np.inf

            var_invalid = ~np.isfinite(src_var)
            src_var[var_invalid] = np.inf

            # Source names ending with 'e' are split off as the
            # diffuse/extended group; the rest are point sources.
            dfs_mask = np.char.endswith(src_name, 'e')
            pts_mask = ~dfs_mask

            pts_name = src_name[pts_mask]
            pts_ra = src_ra[pts_mask]
            pts_dec = src_dec[pts_mask]
            pts_sig = src_sig[pts_mask]
            pts_flx = src_flx[pts_mask]
            pts_var = src_var[pts_mask]

            dfs_name = src_name[dfs_mask]
            dfs_ra = src_ra[dfs_mask]
            dfs_dec = src_dec[dfs_mask]
            dfs_file = src_ext[dfs_mask]
            dfs_sig = src_sig[dfs_mask]
            dfs_flx = src_flx[dfs_mask]
            dfs_var = src_var[dfs_mask]

            hdulist.close()
            # Drop the cached .data reference so the memmap can be freed.
            del hdulist['LAT_Point_Source_Catalog'].data
        else:
            if verbose:
                print('Parsing xmlFile %s ...' % infile.split('/')[-1])
            srcModel = SourceModel(infile, raiseErr=True)

            pts_name, pts_ra, pts_dec, pts_sig, pts_flx, pts_var = [], [], [], [], [], []
            dfs_name, dfs_ra, dfs_dec, dfs_file, dfs_sig, dfs_flx, dfs_var = [], [], [], [], [], [], []
            for src in srcModel.srcList.values():
                if src.type == 'PointSource':
                    pts_name.append(src.name)
                    pts_ra.append(src.spatialModel.RA.value)
                    pts_dec.append(src.spatialModel.DEC.value)
                    # Significance is sqrt(TS); np.inf marks "unknown".
                    if hasattr(src, 'TS_value'):
                        pts_sig.append(np.sqrt(float(src.TS_value)))
                    else:
                        pts_sig.append(np.inf)

                    if hasattr(src, 'Flux1000'):
                        pts_flx.append(float(src.Flux1000))
                    else:
                        pts_flx.append(np.inf)

                    if hasattr(src, 'Variability_Index'):
                        pts_var.append(float(src.Variability_Index))
                    else:
                        pts_var.append(np.inf)
                elif src.type == 'DiffuseSource':
                    dfs_name.append(src.name)
                    # Only SpatialMap sources carry a template file path.
                    if src.spatialModel.type == 'SpatialMap':
                        dfs_file.append(src.spatialModel.file)
                    else:
                        dfs_file.append(None)

                    if hasattr(src.spatialModel, 'RA'):
                        dfs_ra.append(src.spatialModel.RA.value)
                    else:
                        dfs_ra.append(np.nan)

                    if hasattr(src.spatialModel, 'DEC'):
                        dfs_dec.append(src.spatialModel.DEC.value)
                    else:
                        dfs_dec.append(np.nan)

                    if hasattr(src, 'TS_value'):
                        dfs_sig.append(np.sqrt(float(src.TS_value)))
                    else:
                        dfs_sig.append(np.inf)

                    if hasattr(src, 'Flux1000'):
                        dfs_flx.append(float(src.Flux1000))
                    else:
                        dfs_flx.append(np.inf)

                    if hasattr(src, 'Variability_Index'):
                        dfs_var.append(float(src.Variability_Index))
                    else:
                        dfs_var.append(np.inf)
                else:
                    raise RuntimeError('Unknown source type %s of "%s"!'%(src.type, src.name))

        pts_l, pts_b = CEL2GAL(pts_ra, pts_dec)
        self.pts = Table([pts_name, pts_ra, pts_dec, pts_l, pts_b, pts_sig, pts_flx, pts_var],
                         names=('name', 'ra', 'dec', 'l', 'b', 'sig', 'flux', 'var'))
        self.dfs = Table([dfs_name, dfs_ra, dfs_dec, dfs_file, dfs_sig, dfs_flx, dfs_var],
                         names=('name', 'ra', 'dec', 'file', 'sig', 'flux', 'var'))

    def loadInfo(self, infile):
        """
        infile can be the fermi fits catalog!
        Load significance and variability from another xmlFile!

        Replaces the 'sig', 'flux' and 'var' columns of both tables with
        the values found in infile; sources not present there get np.inf.
        """
        if infile.endswith('.fits') or infile.endswith('.fit'):
            hdulist = pf.open(infile, memmap=True)

            src_data = hdulist['LAT_Point_Source_Catalog'].data
            src_name = np.char.strip(src_data.field('Source_Name')).tolist()
            src_sig = src_data.field('Signif_Avg').copy()
            src_flx = src_data.field('Flux1000').copy()
            # BUG FIX: was missing .copy(), so the np.inf substitution
            # below mutated the open (memmap'd) FITS record array.
            src_var = src_data.field('Variability_Index').copy()

            sig_invalid = ~np.isfinite(src_sig)
            src_sig[sig_invalid] = np.inf

            flx_invalid = ~np.isfinite(src_flx)
            src_flx[flx_invalid] = np.inf

            var_invalid = ~np.isfinite(src_var)
            src_var[var_invalid] = np.inf

            nsrc = len(src_name)

            # Map each point source onto its catalog row; nsrc is used as
            # an out-of-range sentinel for "not found".
            pts_name = self.pts['name'].tolist()
            idx = []
            for ptn in pts_name:
                try:
                    idx.append(src_name.index(ptn))
                except ValueError:
                    idx.append(nsrc)
            idx = np.array(idx)
            # Clamp sentinel indices so the fancy indexing is valid, then
            # mask the not-found entries to np.inf via np.where.
            idx_fix = np.minimum(idx, nsrc-1)
            pts_sig = np.where(idx < nsrc, src_sig[idx_fix], np.inf)
            pts_flx = np.where(idx < nsrc, src_flx[idx_fix], np.inf)
            pts_var = np.where(idx < nsrc, src_var[idx_fix], np.inf)

            # Same lookup for the diffuse sources.
            dfs_name = self.dfs['name'].tolist()
            idx = []
            for dfn in dfs_name:
                try:
                    idx.append(src_name.index(dfn))
                except ValueError:
                    idx.append(nsrc)
            idx = np.array(idx)
            idx_fix = np.minimum(idx, nsrc-1)
            dfs_sig = np.where(idx < nsrc, src_sig[idx_fix], np.inf)
            dfs_flx = np.where(idx < nsrc, src_flx[idx_fix], np.inf)
            dfs_var = np.where(idx < nsrc, src_var[idx_fix], np.inf)

            hdulist.close()
            # Drop the cached .data reference so the memmap can be freed.
            del hdulist['LAT_Point_Source_Catalog'].data
        else:
            pts_sig, pts_flx, pts_var = [], [], []
            dfs_sig, dfs_flx, dfs_var = [], [], []

            sm2 = SourceModel(infile, raiseErr=True)
            for sname in self.pts['name']:
                if sname in sm2.srcList.keys():
                    src = sm2[sname]
                    if hasattr(src, 'TS_value'):
                        pts_sig.append(np.sqrt(float(src.TS_value)))
                    else:
                        pts_sig.append(np.inf)

                    if hasattr(src, 'Flux1000'):
                        pts_flx.append(float(src.Flux1000))
                    else:
                        pts_flx.append(np.inf)

                    if hasattr(src, 'Variability_Index'):
                        pts_var.append(float(src.Variability_Index))
                    else:
                        pts_var.append(np.inf)
                else:
                    pts_flx.append(np.inf)
                    pts_sig.append(np.inf)
                    pts_var.append(np.inf)

            for sname in self.dfs['name']:
                if sname in sm2.srcList.keys():
                    src = sm2[sname]
                    if hasattr(src, 'TS_value'):
                        dfs_sig.append(np.sqrt(float(src.TS_value)))
                    else:
                        dfs_sig.append(np.inf)

                    if hasattr(src, 'Flux1000'):
                        dfs_flx.append(float(src.Flux1000))
                    else:
                        dfs_flx.append(np.inf)

                    if hasattr(src, 'Variability_Index'):
                        dfs_var.append(float(src.Variability_Index))
                    else:
                        dfs_var.append(np.inf)
                else:
                    dfs_flx.append(np.inf)
                    dfs_sig.append(np.inf)
                    dfs_var.append(np.inf)

        self.pts.replace_column('sig', pts_sig)
        self.pts.replace_column('flux', pts_flx)
        self.pts.replace_column('var', pts_var)

        self.dfs.replace_column('sig', dfs_sig)
        self.dfs.replace_column('flux', dfs_flx)
        self.dfs.replace_column('var', dfs_var)

    def __getitem__(self, key):
        """
        Look up one or more sources by name.

        Returns a (pts_rows, dfs_rows) pair of sub-tables.  Names are
        matched in the diffuse table first; only names not found there are
        searched among the point sources.  Unmatched names are silently
        dropped.
        """
        if isinstance(key, str):
            snames_list = [key]
        elif isinstance(key, list):
            snames_list = key
        elif key is None:
            raise IOError('The input key cannot be None!')
        else:
            snames_list = list(key)

        dfs_names = self.dfs['name']
        dfs_idx, found_list = [], []
        for sname in snames_list:
            try:
                idx = np.nonzero(dfs_names == sname)[0][0]
            except IndexError:
                # Not a diffuse source; may still be a point source below.
                continue
            dfs_idx.append(idx)
            found_list.append(sname)

        pts_names = self.pts['name']
        pts_idx = []
        for sname in snames_list:
            if sname in found_list:
                continue
            try:
                idx = np.nonzero(pts_names == sname)[0][0]
            except IndexError:
                continue
            pts_idx.append(idx)
        return self.pts[pts_idx], self.dfs[dfs_idx]

def freeze_index(infile, outfile=None):
    """
    Freeze every non-normalization spectral parameter of every source,
    dropping fitted errors and parameter ties, and write the result to
    outfile.
    """
    # Normalization-like parameter names that are left free.
    norm_names = ('Prefactor', 'norm', 'Integral', 'Value', 'Normalization')

    srcModel = SourceModel(infile, raiseErr=True)
    for src in srcModel.srcList.values():
        for parname, par in src.spectrum.parameters.items():
            if hasattr(par, 'error'): del par['error']
            if parname in norm_names:
                continue
            if hasattr(par, 'tieto'): del par['tieto']
            par.free = 0

    srcModel.setAttributes()
    srcModel.writeTo(outfile)

def freeze_parameter(infile, param, outfile=None):
    """
    Freeze the spectral parameter `param` for every source where it is
    currently free, and write the model to outfile.
    """
    srcModel = SourceModel(infile, raiseErr=True)
    for src in srcModel.srcList.values():
        if not hasattr(src.spectrum, param):
            continue
        if not src.spectrum.parameters[param].free:
            continue
        print('%s of source %s is freezed!' % (param, src.name))
        target = src.spectrum.parameters[param]
        if hasattr(target, 'tieto'):
            del target['tieto']
        target.free = 0

    srcModel.setAttributes()
    srcModel.writeTo(outfile)

def freeze_allButIemIso(infile, outfile=None):
    """
    Freeze sources except those with names contain 'iem' and 'iso'.
    """
    srcModel = SourceModel(infile, raiseErr=True)
    for sname, src in srcModel.srcList.items():
        lowered = sname.lower()
        if ('iem' in lowered) or ('iso' in lowered):
            continue
        for _pname, par in src.spectrum.parameters.items():
            # Drop fitted errors and ties before freezing.
            for attr in ('error', 'tieto'):
                if hasattr(par, attr):
                    del par[attr]
            par.free = 0

    srcModel.writeTo(outfile)

def turnOffEdisp(infile, outfile, keep_freezed=False):
    """
    Turn off the energy dispersion.
    If keep_freezed=True, do not change the edisp of freezed sources.
    """
    srcModel = SourceModel(infile, raiseErr=True)
    for sname, src in srcModel.srcList.items():
        # Act on every source unless it is frozen and keep_freezed is set.
        if src.isfree or not keep_freezed:
            src.spectrum['apply_edisp'] = 'false'
            print('[turnOffEdisp] The edisp of %s is off!' % sname)
    srcModel.writeTo(outfile)

def turnOnEdispForFreeSource(infile, outfile,
                             not_apply_to_index_freezed=False, keep_freezed=False):
    """
    Turn on edisp swith for free source.
    When not_apply_to_index_freezed=True, the edisp of index-freezed source will
    be disabled.
    If keep_freezed=True, do not change the edisp of freezed sources.
    Valid for version v10r0p5 of the Science Tools
    """
    norm_names = ('Prefactor', 'norm', 'Integral', 'Value', 'Normalization')

    srcModel = SourceModel(infile, raiseErr=True)
    for sname, src in srcModel.srcList.items():
        lowered = sname.lower()
        if 'iem' in lowered or 'iso' in lowered:
            # Diffuse background components never get edisp applied.
            src.spectrum['apply_edisp'] = 'false'
            continue

        turn_on = src.isfree
        if not_apply_to_index_freezed:
            # Require at least one free non-normalization parameter.
            index_is_free = any(
                par.free > 0
                for pname, par in src.spectrum.parameters.items()
                if pname not in norm_names)
            turn_on = turn_on and index_is_free

        if turn_on:
            src.spectrum['apply_edisp'] = 'true'
        elif not keep_freezed:
            src.spectrum['apply_edisp'] = 'false'
    srcModel.writeTo(outfile)

def addAeffsysToModel(infile, scalefile, outfile):
    """
    Add Aeff_sys to the sources with spectral index free.
    The scaling is given in a scaling file which is just a text file with two columns,
    energy in MeV and the relative scaling of the effective area. Because points are
    interpolated on a log-log grid (i.e., a power-law is used to interpolate between
    points), it is strongly recommended to use a rather fine log(E) binning (a good
    rule of thumb is at least 15 bins per decade).
    Check http://fermi.gsfc.nasa.gov/ssc/data/analysis/scitools/Aeff_Systematics.html for details.
    Valid for version v10r0p5 of the Science Tools.
    """
    norm_names = ('Prefactor', 'norm', 'Integral', 'Value', 'Normalization')

    srcModel = SourceModel(infile, raiseErr=True)
    for sname, src in srcModel.srcList.items():
        lowered = sname.lower()
        if 'iem' in lowered or 'iso' in lowered or not src.isfree:
            continue
        for pname, par in src.spectrum.parameters.items():
            if pname in norm_names:
                continue
            if par.free > 0:
                # At least one shape parameter is free: attach the scaling.
                src.spectrum['scaling_file'] = scalefile
                break
    srcModel.writeTo(outfile)

def doNothing(infile, outfile=None, sort=False):
    """
    Re-write a xmlfile in the same fashion as those output by SMKernel,
    which is convenient to compare with the diff command.
    """
    model = SourceModel(infile, raiseErr=True)
    if sort:
        model.sort()
    model.writeTo(outfile)

#def classifySrcs(infile, outprefix=None, keepiem=True):
def classifyPsExt(infile, outprefix=None, keepiem=True):
    """
    Separate the diffuse and point sources in the infile to two separated files.
    If keepiem=True, no iem and iso component in either models, otherwise it will
    be put in diffuse model.
    """
    outprefix = outprefix or '.'.join(infile.split('.')[:-1])

    srcModel = SourceModel(infile, raiseErr=True)
    psModel = SourceModel(raiseErr=False)
    dfModel = SourceModel(raiseErr=False)
    for src in srcModel.srcList.values():
        if src.type == 'PointSource':
            psModel[None] = src
            continue
        # Diffuse branch: optionally drop the iem/iso components.
        if (not keepiem) and ('iem' in src.name or 'iso' in src.name):
            print('[INFO] KEEPIEM=False, src %s is skipped!' % src.name)
            continue
        dfModel[None] = src
    psModel.writeTo(outprefix + '_ps.xml')
    dfModel.writeTo(outprefix + '_df.xml')

def classifyFreeFix(infile, outprefix=None):
    """
    Separate the free and fixed sources in the infile to two separate files.
    """
    outprefix = outprefix or '.'.join(infile.split('.')[:-1])

    srcModel = SourceModel(infile, raiseErr=True)
    frModel = SourceModel(raiseErr=False)
    fxModel = SourceModel(raiseErr=False)
    for src in srcModel.srcList.values():
        # Route each source to the matching output model.
        dest = frModel if src.isfree else fxModel
        dest[None] = src
    frModel.writeTo(outprefix + '_freed.xml')
    fxModel.writeTo(outprefix + '_fixed.xml')

def xmlMerge(infile1, infile2, outfile, exceptions=None):
    """
    add infile1 and infile2, and save to outfile except the ones listed in
    the exceptions.

    Parameters
    ----------
    exceptions : str, list or iterable of str, optional
        Source names to remove from the merged model.
    """
    outxml = SourceModel(infile1, raiseErr=True)
    inxml2 = SourceModel(infile2, raiseErr=True)
    for src in inxml2.srcList.values():
        outxml[None] = src

    # Idiom fix: 'is not None' instead of 'not ... is None'.
    if exceptions is not None:
        if isinstance(exceptions, str):
            exceptions = [exceptions]
        elif not isinstance(exceptions, list):
            exceptions = list(exceptions)

        for name in exceptions:
            # Membership on the mapping itself; .keys() was redundant.
            if name in outxml.srcList:
                del outxml[name]

    outxml.sort()
    outxml.writeTo(outfile)

def xmlAddOneByOne(original_file, add_file, outdir=None, rename=None):
    """
    Add every srcs in add_file one by one to original_file and save to outdir.
    """
    addxml = SourceModel(add_file, raiseErr=True)
    origin = SourceModel(original_file, raiseErr=True)

    prefix = '.'.join(original_file.split('/')[-1].split('.')[:-1])
    if outdir is None:
        outdir = ''
    elif outdir and (not outdir.endswith('/')):
        outdir += '/'

    for src in addxml.srcList.values():
        # Insert under the override name when given, else the source name.
        name = rename if isinstance(rename, str) else src['name']
        outfile = '%s%s_%s.xml' % (outdir, prefix, '_'.join(src['name'].split()))
        print('[xmlAddOneByOne] Add %s to %s and save to %s ...' % (src['name'], original_file, outfile))
        # Add, dump, then remove so the base model stays unchanged.
        origin[name] = src
        origin.writeTo(outfile)
        del origin[name]
    print('Done.')

#def makeXmlFromList(extsrcsList, outfile, spectype='PowerLaw', ismapcube=False):
def genExtXml(extsrcsList, outfile, spectype='PowerLaw', ismapcube=False):
    """
    Collect the srcfile in extsrcslist, make a xmlfile and save to outfile.
    spectype=PowerLaw|LogParabola|PLSuperExpCutoff

    Parameters
    ----------
    extsrcsList : list, ndarray, str or iterable
        Source-template file paths, or the path of a text file listing one
        template per line, or a single template path.
    outfile : str
        Output XML path.
    ismapcube : bool
        Use a MapCube spatial model (with unit-scale spectral defaults)
        instead of a SpatialMap.
    """
    if isinstance(extsrcsList, list):
        _srcList = copy.deepcopy(extsrcsList)
    elif isinstance(extsrcsList, np.ndarray):
        _srcList = extsrcsList.flatten().tolist()
    elif isinstance(extsrcsList, str):
        if os.path.isfile(extsrcsList):
            # A text file with one template path per line; 'with' closes
            # the handle even on error (previously leaked on exception).
            with open(extsrcsList, 'r') as fh:
                _srcList = [s.strip() for s in fh]
        else:
            _srcList = [extsrcsList]
    else:
        _srcList = list(extsrcsList)

    cwd = os.getcwd()
    _spectype = spectype.lower()
    sm = SourceModel(raiseErr=False)
    for sf in _srcList:
        # Derive the source name from the file name, stripping a trailing
        # '.fit*' extension when present.
        if '.fit' in sf:
            name = '.fit'.join(sf.split('/')[-1].split('.fit')[:-1])
        else:
            name = sf.split('/')[-1]

        # Make the template path absolute.
        if sf.startswith('/'):
            _sf = sf
        else:
            _sf = '%s/%s' % (cwd, sf)
        print('[makeXmlFromList] Adding %s ...' % _sf)

        if ismapcube:
            spatobj = Models2.SpatialType.MapCube(_sf)
        else:
            spatobj = Models2.SpatialType.SpatialMap(_sf)

        if _spectype == 'powerlaw':
            src = Models2.PowerLaw(name, spatobj)

            if ismapcube: # as a scale
                src.spectrum.Prefactor.value = 1.
                src.spectrum.Prefactor.scale = 1.
                src.spectrum.Index.value = 0.
                src.spectrum.Index.scale = 1.
                src.spectrum.Index.min = -5.
        elif _spectype == 'logparabola':
            src = Models2.LogParabola(name, spatobj)

            if ismapcube: # as a scale
                src.spectrum.norm.value = 1.
                src.spectrum.norm.scale = 1.
                src.spectrum.alpha.value = 0.
                src.spectrum.alpha.scale = 1.
                src.spectrum.alpha.min = -5.
        elif _spectype == 'plsuperexpcutoff':
            src = Models2.PLSuperExpCutoff(name, spatobj)

            if ismapcube: # as a scale
                src.spectrum.Prefactor.value = 1.
                src.spectrum.Prefactor.scale = 1.
                src.spectrum.Index1.value = 0.
                src.spectrum.Index1.scale = 1.
                src.spectrum.Index1.min = -5.
        else:
            raise NotImplementedError(spectype)

        sm[None] = src
    sm.writeTo(outfile)

def copyFreeFix(infile, example, outfile):
    """
    copy the free fix of parameters in example to infile, and save to outfile
    """
    target = SourceModel(infile, raiseErr=True)
    template = SourceModel(example, raiseErr=True)
    for sname, tpl_src in template.srcList.items():
        if sname not in target.names:
            continue
        dst_src = target[sname]
        # Only copy between matching spectral models.
        if dst_src.spectrum.type != tpl_src.spectrum.type:
            continue
        for pname, par in tpl_src.spectrum.parameters.items():
            dst_src.spectrum.parameters[pname].free = par.free
    target.writeTo(outfile)

def srcSpecDiff(infile1, infile2, outfile='spect_diff.dat'):
    """
    Compare the value of free parameters between the two files
    """
    x1 = SourceModel(infile1, raiseErr=True)
    x2 = SourceModel(infile2, raiseErr=True)

    p1val, p2val, p1err, p2err, cmts = [], [], [], [], []
    for sname, src1 in x1.srcList.items():
        if sname not in x2.names:
            continue
        src2 = x2[sname]
        if src1.spectrum.type != src2.spectrum.type:
            print('The spectrum types are different for %s: %s v.s. %s' % \
                  (sname, src1.spectrum.type, src2.spectrum.type))
            continue
        for pname, par1 in src1.spectrum.parameters.items():
            par2 = src2.spectrum.parameters[pname]
            if not (par1.free or par2.free):
                continue
            cmts.append('# %s__%s'%(sname, pname))
            # Rescale the second value/error onto the first file's scale.
            factor = par2.scale / par1.scale
            p1val.append('%.10e' % par1.value)
            p2val.append('%.10e' % (par2.value * factor))
            err1 = par1.error if 'error' in par1.__dict__ else 0.
            p1err.append('%.10e' % err1)
            err2 = par2.error * np.abs(factor) if 'error' in par2.__dict__ else 0.
            p2err.append('%.10e' % err2)

    outdata = np.c_[p1val, p2val, p1err, p2err, cmts]
    np.savetxt(outfile, outdata, delimiter='\t', fmt='%s',
               header='p1val\tp2val\tp1err\tp2err\tpnames\t###1=%s; 2=%s' % (infile1, infile2))

def separateXmlFile(infile, npart):
    """
    Separate the input xmlfile input npart parts.
    npart should be an integer and larger than 1.

    Each part receives nsrcs // npart sources; the remainder is appended
    to the last part.
    """
    assert isinstance(npart, int)
    assert npart > 1

    x = SourceModel(infile, raiseErr=True)
    nsrcs = len(x.names)
    nsrcs_per_part = nsrcs // npart
    assert nsrcs > 0
    # BUG FIX: with fewer sources than parts the chunk size is 0 and the
    # modulo below raised an uninformative ZeroDivisionError.
    if nsrcs_per_part == 0:
        raise ValueError('Cannot separate %i sources into %i parts!' % (nsrcs, npart))

    print('[separateXmlFile] Start to separate ...')
    isrc, ipart, xx = 0, 0, None
    outfile_format = '.'.join(infile.split('/')[-1].split('.')[:-1]) + '_part%02i.xml'
    for sname, src in x.srcList.items():
        # Open a new part at each chunk boundary until npart parts have
        # been started; any remainder accumulates in the last part.
        if (isrc % nsrcs_per_part == 0) and (ipart < npart):
            if xx is not None:
                outfile = outfile_format%ipart
                xx.writeTo(outfile)
                print('[separateXmlFile] %s sources have been saved to %s!' % \
                       (len(xx.names), outfile))
            ipart += 1
            xx = SourceModel(raiseErr=False)
        xx[None] = src
        isrc += 1

    # Flush the final (possibly oversized) part.
    outfile = outfile_format%ipart
    xx.writeTo(outfile)
    print('[separateXmlFile] %s sources have been saved to %s!' % \
           (len(xx.names), outfile))


def replaceExpCutoff(infile, outfile):
    """
    replace the spectrum of the free sources with ExpCutoff
    with the similar LogParabola spectral type
    It may be helpful in the sed fitting

    Parameters
    ----------
    infile : str
        Input XML model path.
    outfile : str
        Output XML model path; every free *ExpCutoff* source is replaced
        by a LogParabola source of the same name, others are unchanged.

    Raises
    ------
    NotImplementedError
        For unsupported spectral or spatial model types.
    """
    xobj = SourceModel(infile)
    newsobjs = []
    for sname, sobj in xobj.srcList.items():
        # Only free sources whose spectral type contains 'ExpCutoff'.
        if sobj.isfree and 'ExpCutoff' in sobj.spectrum.type:
            spec = sobj.spectrum
            sptype = spec.type

            # For each variant, extract/convert to a common (N0, g0, d, b)
            # parameterization.  NOTE(review): the mappings below look like
            # the standard Fermi PLSuperExpCutoff-family reparameterizations
            # -- confirm against the LAT spectral-model definitions.
            if sptype == 'PLSuperExpCutoff4':
                # Parameters are already in the (IndexS, ExpfactorS) form.
                g0 = spec.IndexS.value*spec.IndexS.scale
                d = spec.ExpfactorS.value*spec.ExpfactorS.scale
                N0 = spec.Prefactor.value*spec.Prefactor.scale
                b = spec.Index2.value*spec.Index2.scale

                g0f = spec.IndexS.free
                df = spec.ExpfactorS.free
                covtype = 1 # to logP
            elif sptype == 'PLSuperExpCutoff2':
                g1 = spec.Index1.value*spec.Index1.scale
                a = spec.Expfactor.value*spec.Expfactor.scale
                b = spec.Index2.value*spec.Index2.scale
                N1 = spec.Prefactor.value*spec.Prefactor.scale
                E0 = spec.Scale.value*spec.Scale.scale

                # Convert (g1, a, b) at pivot E0 to the (g0, d) form.
                d = a*b**2*E0**b
                g0 = g1-d/b
                N0 = N1*np.exp(-d/b**2)

                g0f = spec.Index1.free
                # d is free if either of its ingredients is free.
                df = spec.Expfactor.free | spec.Index2.free
                covtype = 1 # to logP
            elif sptype == 'PLSuperExpCutoff':
                g1 = spec.Index1.value*spec.Index1.scale
                b = spec.Index2.value*spec.Index2.scale
                N1 = spec.Prefactor.value*spec.Prefactor.scale
                Ec = spec.Cutoff.value*spec.Cutoff.scale
                E0 = spec.Scale.value*spec.Scale.scale

                # Convert the cutoff energy Ec to the (g0, d) form.
                d = b**2*(E0/Ec)**b
                g0 = g1-d/b
                N0 = N1*np.exp(-d/b**2)

                g0f = spec.Index1.free
                df = spec.Cutoff.free | spec.Index2.free
                covtype = 1 # to logP
            else:
                raise NotImplementedError(sptype)

            # Rebuild an equivalent spatial model for the new source.
            spat0 = sobj.spatialModel
            if spat0.type == 'SkyDirFunction':
                ra = spat0.RA.value*spat0.RA.scale
                dec= spat0.DEC.value*spat0.DEC.scale
                spatobj = Models2.SpatialType.PointSource(ra, dec)
            elif spat0.type == 'SpatialMap':
                spatobj = Models2.SpatialType.SpatialMap(spat0.file)
            elif spat0.type == 'RadialDisk':
                ra = spat0.RA.value*spat0.RA.scale
                dec= spat0.DEC.value*spat0.DEC.scale
                rad= spat0.Radius.value*spat0.Radius.scale
                spatobj = Models2.SpatialType.RadialDisk(ra, dec, rad)
            elif spat0.type == 'RadialGaussian':
                ra = spat0.RA.value*spat0.RA.scale
                dec= spat0.DEC.value*spat0.DEC.scale
                rad= spat0.Sigma.value*spat0.Sigma.scale
                spatobj = Models2.SpatialType.RadialGaussian(ra, dec, rad)
            else:
                raise NotImplementedError(spat0.type)

            if covtype == 1:
                # Map (g0, d) onto LogParabola (alpha, beta); the pivot Eb
                # is copied from the original Scale parameter below.
                alpha = -g0
                beta = d/2.
                # Split N0 into value*scale with scale a power of ten.
                N0scal = 10.**(int(np.log10(N0)+0.5))
                N0val = N0/N0scal

                # Warn when the converted values fall outside the typical
                # LogParabola parameter ranges.
                if alpha<0. or alpha>5.:
                    print('[warn] %s: alpha=%f'%(sname, alpha))
                if beta<0. or beta>1.:
                    print('[warn] %s: beta=%f'%(sname, beta))

                sobj2 = Models2.LogParabola(sname, spatobj)
                sobj2.spectrum.norm.value = N0val
                sobj2.spectrum.norm.scale = N0scal
                sobj2.spectrum.norm.free = True

                # Keep the original pivot energy, but fixed.
                sobj2.spectrum.Eb.value = sobj.spectrum.Scale.value
                sobj2.spectrum.Eb.scale = sobj.spectrum.Scale.scale
                sobj2.spectrum.Eb.min = sobj.spectrum.Scale.min
                sobj2.spectrum.Eb.max = sobj.spectrum.Scale.max
                sobj2.spectrum.Eb.free = False

                sobj2.spectrum.alpha.value = alpha
                sobj2.spectrum.alpha.scale = 1.
                sobj2.spectrum.alpha.free = bool(g0f)
                # Widen the default bounds if the converted value hits them.
                if alpha<=sobj2.spectrum.alpha.min:
                    sobj2.spectrum.alpha.min = alpha-2.
                if alpha>=sobj2.spectrum.alpha.max:
                    sobj2.spectrum.alpha.max = alpha+2.

                sobj2.spectrum.beta.value = beta
                sobj2.spectrum.beta.scale = 1.
                sobj2.spectrum.beta.free = bool(df)
                if beta<=sobj2.spectrum.beta.min:
                    sobj2.spectrum.beta.min = beta-2.
                if beta>=sobj2.spectrum.beta.max:
                    sobj2.spectrum.beta.max = beta+2.
            else:
                raise NotImplementedError('covtype=%d'%covtype)

            newsobjs.append(sobj2)

    # Re-inserting under the same name replaces the original sources.
    for sobj in newsobjs:
        xobj[None] = sobj

    xobj.writeTo(outfile)
