
from view.endSrc.MySqlConn import MySqlConn
from view.endSrc.DBConfig import DBConfig
from view.endSrc.MyTools import MyTools

import numpy as np
from numpy import genfromtxt


class tGaussianClustersEstimatorBy1D:
    '''
        DB model for one 1-D Gaussian-mixture estimation run.

        Each instance mirrors a single row of the MySQL table
        tGaussianClustersEstimatorBy1D. The (potentially large) cluster
        probability matrix is kept out of the DB: it is written to a CSV
        file whose name is stored in the row (estProbMatrixFileName) and
        loaded back into memory on readRow().
    '''

    def setSqlConn(self, dbConn: MySqlConn):
        '''
            let the caller set the sql connector dynamically
        :param dbConn: an open MySqlConn; its logger is reused by this object
        :return: None
        '''
        if dbConn is not None:
            self._dbconn = dbConn
            self._logger = dbConn.logger

    def __init__(self, dbconn=None):
        '''
        :param dbconn: optional MySqlConn; may also be injected later
                       via setSqlConn()
        '''
        self._dbconn = None
        self._logger = None
        self.setSqlConn(dbconn)

        # row identity
        self.id = None           # DB primary key (auto_increment)
        self.observerId = None   # id of the owning observer row
        self.nComponents = None  # read-only: number of mixture components

        # estimation results (all None until updateEstimation()/readRow())
        self.estMixedParams = None         # JSON dict, estimated mixture parameters: meanList, varList, paiList
        self.estProbMatrixFileName = None  # csv file for estimated ProbClusters
        self.estNegLogLikelihood = None    # optimized negative log-likelihood
        self.estRegularItem = None         # regularization term of the objective

        ##########################################
        # memory object loaded from the file self.estProbMatrixFileName
        self.m_estProbMatrix = None

    def __str__(self):
        '''
            human-readable dump of all persisted fields, for simple tests
        :return: multi-line string
        '''
        # header previously said 'tGammaClusterEstimatorBy1D' (copy/paste from
        # the Gamma estimator) -- corrected to this class's real name
        tmp = '---------- tGaussianClustersEstimatorBy1D----------------\n'
        tmp += 'id: ' + str(self.id) + '\n'
        tmp += 'observerId: ' + str(self.observerId) + '\n'
        tmp += 'nComponents: ' + str(self.nComponents) + '\n'

        tmp += 'estMixedParams: ' + str(self.estMixedParams) + '\n'

        tmp += 'estProbMatrixFileName: ' + str(self.estProbMatrixFileName) + '\n'
        tmp += 'estNegLogLikelihood: ' + str(self.estNegLogLikelihood) + '\n'
        tmp += 'estRegularItem: ' + str(self.estRegularItem) + '\n'

        tmp += 'm_estProbMatrix: ' + str(self.m_estProbMatrix) + '\n'

        return tmp

    def createTable(self):
        '''
            create the backing table if it does not already exist
        :return: True if the table exists afterwards, False on DB failure
        '''
        assert self._dbconn is not None, 'please set setSqlConn first'

        if self._dbconn.tableExists('tGaussianClustersEstimatorBy1D'):
            print('exist..tGaussianClustersEstimatorBy1D, if you want to drop it, please call MySqlConn.dropTable')
            return True

        sql = """create table tGaussianClustersEstimatorBy1D(
                id int primary key auto_increment not null,
                observerId int not null,
                nComponents int not null,
                estMixedParams  JSON,
                estProbMatrixFileName CHAR(200),
                estNegLogLikelihood double,
                estRegularItem double ) """

        # foreign key(observerId) references tObserver(id)) is deliberately
        # omitted during development

        if not self._dbconn.createTable(sql):
            self._logger.write('tGaussianClustersEstimatorBy1D.createTable is failed')
            return False
        return True

    def createRow(self, observerId, nComponents, estMixedParamsJSONDict=None):
        '''
            create a new DB record for an algorithm run
        :param observerId: positive int, id of the owning observer
        :param nComponents: positive int, number of Gaussian components
        :param estMixedParamsJSONDict: optional initial mixture parameters;
               NOTE(review): kept only in memory here -- it is NOT written to
               the DB by this method (persist it via updateEstimation)
        :return: id of the new row, or None on failure
        '''
        # check the type before comparing, so bad input fails the assert
        # instead of raising an unrelated TypeError
        assert isinstance(observerId, int) and observerId > 0
        assert nComponents > 0, 'The estimated number of Gaussian components must be positive'

        assert self._dbconn is not None, 'please set setSqlConn first'

        # both values are asserted ints above, so %-formatting is safe here;
        # parameterized queries would still be preferable if MySqlConn allows
        rtup = (observerId, nComponents)
        sql = "INSERT INTO tGaussianClustersEstimatorBy1D( " \
              "observerId, nComponents )" \
              "VALUES ( %d, %d ) " % rtup

        newId = self._dbconn.insertRetId(sql)
        if newId is not None:
            # mirror the new row in memory
            self.id = newId
            self.observerId = observerId
            self.nComponents = nComponents
            self.estMixedParams = estMixedParamsJSONDict

            # no estimation has run yet for this row
            self.estProbMatrixFileName = None
            self.estNegLogLikelihood = None
            self.estRegularItem = None
        else:
            self._logger.write('tGaussianClustersEstimatorBy1D.createRow: failed to insert..' + str(rtup))
        return newId

    def readRow(self, id: int):
        '''
            read one row by primary key and populate this object's fields;
            also loads the probability-matrix CSV into memory when the row
            references one
        :param id: DB id of this table
        :return: True if ok (row read and, if present, matrix file loaded)
        '''
        assert isinstance(id, int)
        assert self._dbconn is not None, 'please set setSqlConn first'

        sql = "SELECT * FROM tGaussianClustersEstimatorBy1D WHERE id = '%s'" % id
        rows = self._dbconn.read(sql)
        if not rows:
            self._logger.write('tGaussianClustersEstimatorBy1D.readRow..failed to read')
            return False

        # column order matches the CREATE TABLE statement in createTable()
        row = rows[0]

        self.id = row[0]
        self.observerId = row[1]
        self.nComponents = row[2]

        self.estMixedParams = MyTools.getDicByJson(row[3])  # python dict or None

        self.estProbMatrixFileName = row[4]
        if self.estProbMatrixFileName is not None:
            pathPrefix = DBConfig().dataFilesRelPathPrefix
            try:
                # np.genfromtxt for performance, instead of pandas
                self.m_estProbMatrix = genfromtxt(pathPrefix + self.estProbMatrixFileName,
                                                  delimiter=',')
            except Exception as e:
                # message previously named the wrong class ('tNormalCluEstBy1D')
                # and dropped the exception detail
                self._logger.write(
                    'tGaussianClustersEstimatorBy1D.readRow..failed to read probMatrix from the file: '
                    + self.estProbMatrixFileName + ' .. ' + str(e))
                self.m_estProbMatrix = None
                return False

        self.estNegLogLikelihood = MyTools.toFloatIfNotNone(row[5])
        self.estRegularItem = MyTools.toFloatIfNotNone(row[6])

        return True

    def _removeEstProbMatrixFile(self):
        '''
            remove the csv file referenced by this row, if any
        :return: True if no matrix file remains on disk for this row
        '''
        # bug fix: '(self.id)' is NOT a tuple -- use an explicit 1-tuple
        sql = "SELECT estProbMatrixFileName FROM tGaussianClustersEstimatorBy1D WHERE id=%d" % (self.id,)
        rows = self._dbconn.read(sql)

        if rows is None:
            tmp = 'tGaussianClustersEstimatorBy1D._removeEstProbMatrixFile..'
            tmp += 'select estProbMatrixFileName..is failed for id=' + str(self.id)
            self._logger.write(tmp)
            return False

        if len(rows) == 0:
            return True

        self.estProbMatrixFileName = rows[0][0]
        if self.estProbMatrixFileName is None:
            return True   # ok, it does not exist

        # try to remove it
        pathPrefix = DBConfig().dataFilesRelPathPrefix
        return MyTools.removeFile(pathPrefix + self.estProbMatrixFileName)

    def updateEstimation(self, mixedParamsJSONDict, probMatrix: np.ndarray,
                         estNegLogLikelihood, estRegularItem):
        '''
            persist the results of one estimation run: the matrix goes to a
            fresh csv file, everything else into the DB row; the previous
            matrix file (if any) is removed first
        :param mixedParamsJSONDict:     JSON dict, paiList, meanList, varMatList
        :param probMatrix:              np.ndarray with shape (n,k)
        :param estNegLogLikelihood:     double
        :param estRegularItem:          double
        :return: True if ok
        '''
        assert self._dbconn is not None, 'please set setSqlConn first'
        assert self.id is not None, 'tGaussianClustersEstimatorBy1D must be loaded from DB first'
        assert len(probMatrix.shape) == 2, 'probMatrix must be a np.ndarray with shape like (n, k)'

        # remove estProbMatrixFileName if exist
        self._removeEstProbMatrixFile()

        # write probMatrix to a new timestamped file
        # NOTE(review): 'GamEstBy1D_' prefix looks like Gamma-estimator legacy;
        # kept byte-identical to avoid breaking external file-name consumers
        pathPrefix = DBConfig().dataFilesRelPathPrefix
        date = MyTools.getDatetimeFileName()
        fName = 'GamEstBy1D_' + 'nc' + str(self.nComponents) + '_probMat_' + date + '.csv'
        try:
            np.savetxt(pathPrefix + fName, probMatrix, delimiter=",")
        except Exception as e:
            self._logger.write('tGaussianClustersEstimatorBy1D.updateEstimation..failed to write to files: ' + str(e) + \
                               ' for id = ' + str(self.id))
            return False

        # write to DB
        rtup = (MyTools.getJsonByDict(mixedParamsJSONDict),
                fName,
                estNegLogLikelihood,
                estRegularItem,
                self.id)
        sql = "UPDATE tGaussianClustersEstimatorBy1D SET " \
                "estMixedParams = '%s', " \
                "estProbMatrixFileName = '%s', " \
                "estNegLogLikelihood = %f, " \
                "estRegularItem = %f " \
              "WHERE id= %d " % rtup

        if not self._dbconn.insDelUpd(sql):
            # bug fix: str + int raised TypeError here before
            self._logger.write('tGaussianClustersEstimatorBy1D.updateEstimation is failed for id= ' + str(self.id))
            return False

        # set memory fields only after the DB commit succeeded
        self.estMixedParams = mixedParamsJSONDict
        self.estProbMatrixFileName = fName
        self.m_estProbMatrix = probMatrix

        self.estNegLogLikelihood = estNegLogLikelihood
        self.estRegularItem = estRegularItem

        return True

    def getEstimatorDetailByOb(self, observerId):
        '''
            fetch all estimator rows belonging to one observer
        :param observerId: positive int
        :return: raw DB rows (possibly empty), or None on DB failure
        '''
        assert self._dbconn is not None, 'please set setSqlConn first'
        assert isinstance(observerId, int) and observerId > 0

        sql = "SELECT * from tGaussianClustersEstimatorBy1D WHERE observerId='%d'" % observerId
        rows = self._dbconn.read(sql)
        if rows is None:
            self._logger.write('tGaussianClustersEstimatorBy1D.getEstimatorDetailByOb..failed to read by observerId= ' + str(observerId))
            return None

        if len(rows) == 0:
            self._logger.write('tGaussianClustersEstimatorBy1D.getEstimatorDetailByOb.. read nothing by observerId= ' + str(observerId))

        return rows