from flask import Blueprint, request, render_template, session, redirect, flash, jsonify
from flask import current_app as app
import numpy as np
import json
from sklearn.neighbors import KernelDensity
from view.endSrc.PolarCoordConvert import PolarCoordConvert
from view.endSrc.GetEllipse import GetEllipse
from view.endSrc.DimensionReduction import DimensionReduction
from view.endSrc.Dataset import Dataset
from view.endSrc.tHighDensityFilter import tHighDensityFilter
from view.endSrc.tUser import tUser
from view.endSrc.tIniceRunner import tIniceRunner
from view.endSrc.tDataset import tDataset
from view.endSrc.tObserver import tObserver
from view.iNiceRunner import INiceRunner
from scipy import stats
from scipy.spatial import distance
from view.endSrc.MyTools import MyTools

# Blueprint holding every view of this module; registered on the Flask app elsewhere.
ac = Blueprint('ac', __name__)
# DB connection is provided via app.config['MySqlConn']; the SQLAlchemy wiring
# below was abandoned in favour of the raw connection.
# # app.config['SQLALCHEMY_DATABASE_URI'] = 'pymysql://inu:abc@localhost:3306/inicedb?charset=utf8'
# app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://inu:abc@127.0.0.1/inicedb?charset=utf8mb4'
# app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
# app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# db = SQLAlchemy(app)


def run(dsIds):
    """Run the i-Nice pipeline with a fixed demo configuration on each dataset.

    One INiceRunner is executed per dataset id in *dsIds*; the database ids of
    the completed runs are returned in the same order.
    """
    # NOTE(review): checkAdmin() returns a redirect for non-admins, but its
    # return value is discarded here, so it does not actually block anyone
    # -- TODO confirm intent.
    checkAdmin()

    demoName = 'test'
    generatorClass = 'EdgeObserverGenerator'
    observerCount = 5
    estimatorClass = 'GaussianClustersEstimatorBy1D'
    p2pClass = 'One2OneDistance'
    n2nClass = 'N2NDistances'
    filterConfig = {'KdeFilter': {'topPer': 0.9}, 'DensityPeakFilter': {'cutoffPer': 0.1}}

    dbIds = []
    for datasetId in dsIds:
        worker = INiceRunner()
        worker.setConfig(sqlConn=app.config['MySqlConn'], dsId=datasetId, name=demoName,
                         observerGenClassName=generatorClass, nObservers=observerCount,
                         clusterEstimatorClassName=estimatorClass,
                         one2oneClasName=p2pClass, n2nClassName=n2nClass,
                         maxob=20, minob=1,
                         filterCfgJSONDict=filterConfig, mixedParamsCfgJSONDict=None)
        worker.run()
        dbIds.append(worker.dbId)

    return dbIds


@ac.route('/show_config', methods=['GET', 'POST'])
def show_config():
    """Demo-dataset selection page.

    GET renders summaries of the predefined demo datasets; POST takes the
    selected dataset codes, launches a run for each, and redirects to
    show_result with the resulting run ids.
    """
    # Explicit membership test instead of the original bare `except:`, which
    # silently swallowed every exception type.
    if 'user' not in session:
        return redirect('login')

    dsIds = [1, 2, 3]  # ids of the fixed demo datasets
    if request.method == 'GET':
        dsDescs = []
        for outi, dsId in enumerate(dsIds):
            ds = Dataset.fromDB(app.config['MySqlConn'], dsId)

            trueCenters = ds.getTrueCentres().tolist()

            dsDescs.append(
                {
                    'code': str(outi),
                    'trueCentersNum': len(trueCenters),
                    'clustersNum': ds.getNumOfClusters(),
                    'rows': ds.getNumOfRecords(),
                    'name': ds.t.name,
                    'desc': ds.t.des,
                    'id': dsId
                }
            )

        return render_template('show_config.html', dsDescs=dsDescs)

    if request.method == 'POST':
        form = request.form.to_dict()
        if not form:
            return redirect('show_config')
        # Form keys are the dataset codes ('0', '1', ...) chosen above.
        toIds = [dsIds[int(code)] for code in form]

        resultIds = run(toIds)
        # Build 'show_result?id=1&id=2...' in one join instead of quadratic +=.
        redirect_url = 'show_result?' + '&'.join('id=' + str(rid) for rid in resultIds)
        return redirect(redirect_url)


@ac.route('/show_result', methods=['GET', 'POST'])
def show_result():
    """Side-by-side result page for one or more runs (GET ?id=...&id=...).

    For each runner id: loads the run row, its dataset and observers, projects
    everything to 2-D when needed, splits the dataset around the estimated
    centres, and renders show_result.html.
    """
    # Explicit membership test instead of the original bare `except:`.
    if 'user' not in session:
        return redirect('login')

    requestArgs = request.values
    if request.method == 'GET':
        runnerIds = requestArgs.getlist('id')
        if not runnerIds:
            return {}

        runnerIds = [int(rid) for rid in runnerIds]

        runResults = []
        for outi, runnerId in enumerate(runnerIds):
            tRunner = tIniceRunner(app.config['MySqlConn'])
            tRunner.readRow(runnerId)
            ds = Dataset.fromDB(app.config['MySqlConn'], tRunner.datasetId)
            tOb = tObserver(app.config['MySqlConn'])

            trueCenters = ds.getTrueCentres().tolist()
            dataset = ds.t.m_dataset.tolist()

            sessionObDetail = tOb.getObserverDetailBySession(runnerId)

            # Column 4 of each observer row is a JSON blob; 'vp' is the viewpoint.
            observer = [json.loads(row[4])['vp'] for row in sessionObDetail]

            estCenters = json.loads(tRunner.centers)['cts']

            # Project everything to 2-D for plotting when the data is higher-
            # dimensional. The kmeans split below is identical in both cases,
            # so it is done once afterwards (the original duplicated it).
            if len(dataset[0]) > 2:
                dataset, observer, estCenters, trueCenters = DimensionReduction.decomposition_MutliVari(
                    False, 2, np.array(dataset), np.array(observer),
                    np.array(estCenters), np.array(trueCenters))

            kmeansidx, kmeanscenter = DimensionReduction.split_data_reidx(tRunner.nCenters, dataset, estCenters)
            splitdata = DimensionReduction.splitdata(kmeansidx, dataset)

            runResults.append(
                {
                    'code': 'r'+str(outi),
                    'trueCenters': trueCenters,
                    'estCenters': estCenters,
                    'ob': observer,
                    'dataset': dataset,
                    'splitdata': splitdata,
                    'trueClustersNum': ds.getNumOfClusters(),
                    'estClustersNum': tRunner.nCenters,
                    'ARI': tRunner.ARI,
                    'NMI': tRunner.NMI,
                    'purity': tRunner.purity,

                    'id': runnerId,
                    'name': ds.t.name
                }
            )

        # NOTE(review): ecnum comes from the *last* runner in the loop -- TODO
        # confirm that is intended when several ids are requested.
        return render_template("show_result.html", runResults=runResults, ecnum=tRunner.nCenters)


@ac.route('/running_config', methods=['GET', 'POST'])
def running_config():
    """Run-configuration page.

    GET renders the form with previous configuration names and the available
    datasets. POST answers two AJAX lookups: 'oldConfigId' returns the stored
    configuration of a previous run, 'datasetId' returns dataset summary info.
    """
    # Explicit membership test instead of the original bare `except:`.
    if 'user' not in session:
        return redirect('login')

    if request.method == 'GET':
        print('running config: enter GET....')
        t = tIniceRunner(app.config['MySqlConn'])
        oldConfigNameList = t.getOldRunConfigNameList()

        t = tDataset(app.config['MySqlConn'])
        datasetNameList = t.getDatasetNameList()

        return render_template('running_config.html', oldConfigNameList=oldConfigNameList, datasetNameList=datasetNameList)

    if request.method == 'POST':
        print('running config: enter POST....')
        requestArgs = request.values

        tIniceRunnerID = requestArgs.get("oldConfigId")
        if tIniceRunnerID is not None:
            if len(tIniceRunnerID) == 0:
                return {}
            tIniceRunnerID = int(tIniceRunnerID)

            t = tIniceRunner(app.config['MySqlConn'])
            t.readRow(tIniceRunnerID)
            oldConfigDetail = {"datasetId": t.datasetId, "obGen": t.observerGenClassName, "numOfOb": t.nObservers}

            # Per-observer estimator/distance configuration rows.
            t = tObserver(app.config['MySqlConn'])
            obConfigDetailRows = t.getObConfigDetail(tIniceRunnerID)
            oldConfigDetail['obDetail'] = obConfigDetailRows

            return jsonify(oldConfigDetail)

        datasetId = requestArgs.get("datasetId")
        if datasetId is not None:
            if len(datasetId) == 0:
                return {}
            datasetId = int(datasetId)

            t = tDataset(app.config['MySqlConn'])
            t.getDatasetDetail(datasetId)

            return jsonify({"datasetName": t.name, "className": t.className, "nRows": t.nRows,
                            "nCols": t.nCols, "nClusters": t.nClusters})


@ac.route('/dataset_detail', methods=['GET', 'POST'])
def dataset_detail():
    """Dataset detail page.

    GET renders the dataset summary plus a 2-D projection for plotting.
    POST is an AJAX lookup returning the covariance matrix of one cluster
    (fields: 'id' = dataset id, 'clusterId' = 1-based cluster index).
    """
    print("enter dataset_detail")

    # Explicit membership test instead of the original bare `except:`.
    if 'user' not in session:
        return redirect('login')

    requestArgs = request.values

    if request.method == 'GET':
        print('dataset detail: enter GET....')

        dsId = requestArgs.get("id")
        if dsId is None or len(dsId) == 0:
            return {}
        dsId = int(dsId)

        dataset = Dataset.fromDB(app.config['MySqlConn'], dsId)

        detailDict = {'id': dsId, 'description': dataset.t.des, 'fileName': dataset.t.fileName,
                      'nRows': dataset.getNumOfRecords(), 'nCols': dataset.getNumOfAttributes(),
                      'className': dataset.t.className, 'numOfClusters': dataset.getNumOfClusters(),
                      'trueCentersFileName': dataset.t.trueCentresFileName, 'name': dataset.t.name}

        paraDetail = dataset.getMixedParams()
        datasetData = dataset.t.m_dataset
        trueCenters = dataset.getTrueCentres()

        # Project to 2-D only when the data has more than two attributes.
        if dataset.getNumOfAttributes() > 2:
            trueCenters_2d, dataset_2d = DimensionReduction.dimensionReduction(trueCenters, datasetData, 2)
        else:
            trueCenters_2d = trueCenters
            dataset_2d = datasetData

        # numpy arrays -> plain lists for the template/JS side.
        trueCenters_2d = trueCenters_2d.tolist()
        dataset_2d = dataset_2d.tolist()
        trueCenters = trueCenters.tolist()
        datasetData = datasetData.tolist()

        return render_template("dataset_detail.html", detailDict=detailDict, paraDetail=paraDetail,
                               trueCenters=trueCenters, dataset=datasetData, trueCenters_2d=trueCenters_2d,
                               dataset_2d=dataset_2d)

    if request.method == 'POST':
        print('dataset detail: enter POST....')

        # Also reject empty string here: the original int('') raised ValueError.
        dsId = requestArgs.get("id")
        if dsId is None or len(dsId) == 0:
            return {}
        dsId = int(dsId)

        dataset = Dataset.fromDB(app.config['MySqlConn'], dsId)

        clusterId = requestArgs.get("clusterId")
        # BUG FIX: the original only checked for the empty string when
        # clusterId was present and then called int(clusterId) unconditionally,
        # raising TypeError when the parameter was missing entirely.
        if clusterId is None or len(clusterId) == 0:
            return {}
        clusterId = int(clusterId) - 1  # request index is 1-based

        paraDetail = dataset.getMixedParams()
        covMat = paraDetail['varMatList'][clusterId]
        return jsonify({'covMat': covMat})


@ac.route('/run_results', methods=['GET', 'POST'])
def run_results():
    """Result dashboard for one i-Nice run session.

    GET renders the full dashboard for ?id=<runnerId>: configuration summary,
    quality metrics (ARI/NMI/purity), 2-D projections, per-observer polar
    views, and distance-density statistics.

    POST serves three variants, selected by the 'code' form field:
      * 'filterProcess_idx' -> JSON drill-down of the high-density filtering
        steps for one mixture component of one observer.
      * 'obId'              -> JSON mixture/density detail for one observer.
      * anything else       -> a new-run form submission; runs i-Nice and
        redirects back to this page on success.
    """
    print('enter run results ...')

    # Explicit membership test instead of the original bare `except:`.
    if 'user' not in session:
        return redirect('login')

    requestArgs = request.values

    if request.method == 'GET':
        print('enter run_results GET')
        # id of the run (tIniceRunner row) to display
        runnerId = requestArgs.get("id")
        if runnerId is None or len(runnerId) == 0:
            return {}
        runnerId = int(runnerId)
        wClusterEstimator, clsName = getWebEstimatorClass(runnerId)

        # Load the run row, its dataset, the observer table and the web-side
        # estimator wrapper.
        tRunner = tIniceRunner(app.config['MySqlConn'])
        tRunner.readRow(runnerId)
        ds = Dataset.fromDB(app.config['MySqlConn'], tRunner.datasetId)
        tOb = tObserver(app.config['MySqlConn'])
        gedfw = wClusterEstimator(app.config['MySqlConn'])

        trueCenters = ds.getTrueCentres().tolist()
        dataset = ds.t.m_dataset.tolist()

        sessionObDetail = tOb.getObserverDetailBySession(runnerId)

        # Unpack positional observer-row columns:
        #   row[0]=observer id, row[3]=estimator name, row[4]=viewpoint JSON,
        #   row[6]=centres JSON, row[7]/row[8]=distance class names,
        #   row[9]=filter config JSON, row[12]=best estimator id.
        obDetail = []
        observer = []
        centersFoundByOb = []
        obIds = []
        bestEstIds = []
        for i, row in enumerate(sessionObDetail, start=1):
            obDetail.append({'obCode': 'b' + str(i), 'p2pDis': row[7], 'n2nDis': row[8],
                             'estimator': row[3], 'filterConfiguration': json.loads(row[9])})
            observer.append(json.loads(row[4])['vp'])
            centersFoundByOb.append(json.loads(row[6])['cts'])
            obIds.append(row[0])
            bestEstIds.append(row[12])

        estCenters = json.loads(tRunner.centers)['cts']
        # TODO: match estimated centres to the closest true centres; for now
        # they are simply labelled 'e1', 'e2', ... in stored order.
        estCenters_match = [('e'+str(idx+1), center) for idx, center in enumerate(estCenters)]

        # kmeans split of the dataset around the estimated centres ==========
        splitidx, kmeanscenter = DimensionReduction.split_data_reidx(tRunner.nCenters, dataset, estCenters)
        kmeancenterlist = [center.tolist() for center in kmeanscenter]

        if ds.getNumOfAttributes() > 2:
            # Reduce all point collections to 2-D in one joint fit so the
            # projections stay mutually consistent.
            obcs, n_ob = np.array(DimensionReduction.split_list(centersFoundByOb), dtype=object)
            trueCentersArr = np.array(trueCenters)
            estCentersArr = np.array(estCenters)
            datasetArr = np.array(dataset)
            observerArr = np.array(observer)

            tc_dr, ec_dr, ds_dr, ob_dr, obcs_dr, kms_dr = DimensionReduction.decomposition_MutliVari(False, 2, trueCentersArr,
                                                                                             estCentersArr, datasetArr,
                                                                                             observerArr, obcs, kmeanscenter)
            centersFoundByOb = DimensionReduction.de_split_list(obcs_dr, n_ob)

            splitdata = DimensionReduction.splitdata(splitidx, ds_dr)
        else:
            # Already 2-D: the "_dr" views are just the originals.
            tc_dr, ec_dr, ds_dr, ob_dr, kms_dr = trueCenters, estCenters, dataset, observer, kmeancenterlist
            splitdata = DimensionReduction.splitdata(splitidx, ds_dr)
        # =======================
        runResults = {
            # -----------------------------------------------Summary of configures
            'id': runnerId,
            'datasetId': tRunner.datasetId,
            'obDetail': obDetail,
            'ObGenClass': tRunner.observerGenClassName,
            'numOfOb': tRunner.nObservers,
            'name': tRunner.name,
            'datasetName': ds.t.name,
            # -----------------------------------------------Summary of results
            'startTime': tRunner.startTime,
            'endTime': tRunner.endTime,
            'ARI': tRunner.ARI,
            'NMI': tRunner.NMI,
            'purity': tRunner.purity,
            'estClusters': tRunner.nCenters,
            'runtime': tRunner.endTime - tRunner.startTime,
            'estCenters_match': estCenters_match,
            'trueClustersNum': ds.getNumOfClusters(),
            # -----------------------------------------------Two dimensional view of summary results:
            'trueCenters': trueCenters,
            'estCenters': estCenters,
            'ob': observer,
            'dataset': dataset,
            # ----------------------------------------------- The two dimensional points input
            'trueCenters_dr': tc_dr,
            'estCenters_dr': ec_dr,
            'ob_dr': ob_dr,
            'dataset_dr': ds_dr,
            # ------------------------------------------------
            "splitdata": splitdata,
            "kmeanscenter": kms_dr,
        }

        # Bounding-box corners of the 2-D data, closed back to the first point,
        # used as reference points in the polar plots.
        minVec = np.min(ds_dr, axis=0)
        maxVec = np.max(ds_dr, axis=0)
        cornerPoints = [
            [minVec[0], minVec[1]],
            [maxVec[0], minVec[1]],
            [maxVec[0], maxVec[1]],
            [minVec[0], maxVec[1]],
            [minVec[0], minVec[1]]
        ]
        # +++++++++++++++++++++++++++++ Per-observer polar views FIG ONE
        polarDataset = []
        polarCenters = []
        polarEstCenters = []
        polarSpcPoints = []
        for i, item in enumerate(ob_dr):
            polarDataset.append(PolarCoordConvert.convert(item, ds_dr))
            polarCenters.append(PolarCoordConvert.convert(item, tc_dr))
            polarEstCenters.append(PolarCoordConvert.convert(item, centersFoundByOb[i]))
            polarSpcPoints.append(PolarCoordConvert.convert(item, cornerPoints))
        # +++++++++++++++++++++++++++++ Summary statistics of observer FIG TWO
        distancesPoints = []
        distances = []
        for obRowId in obIds:  # renamed from `id` (shadowed the builtin)
            tOb.readRow(obRowId)
            distances.append(tOb.m_distances)
            distancesPoints.append([[i, 0] for i in tOb.m_distances])

        # Gaussian KDE of each observer's distance distribution on a 100-point grid.
        den_dis = []
        for item in distances:
            tmp = np.linspace(min(item), max(item), 100)
            kde = stats.gaussian_kde(item)
            density = kde(tmp)
            den_dis.append([[tmp[i], density[i]] for i in range(len(density))])

        # +++++++++++++++++++++++++++++ Summary statistics of observer FIG THREE
        aiccList, bestFitAicc, bestMix_compNum, bestMix_aicc, maxComp_Ob, minComp_Ob, maxComp_AllOb = \
            gedfw.getDetailByObs(obIds, bestEstIds)
        # +++++++++++++++++++++++++++++ Summary statistics of observer FIG FOUR
        bestEst_maxComp, den_mix_best = gedfw.getBestEstimatorDetail(bestEstIds, distances)

        # NOTE: an earlier near-duplicate of the code above survived here as an
        # unreachable string literal after this return; it has been removed.
        return render_template("run_results.html", runResults=runResults, centersFoundByOb=centersFoundByOb,
                               polarDataset=polarDataset, polarCenters=polarCenters, polarEstCenters=polarEstCenters,
                               polarSpcPoints=polarSpcPoints, distances=distancesPoints, den_dis=den_dis,
                               aiccList=aiccList, bestFitAicc=bestFitAicc, maxComp_AllOb=maxComp_AllOb,
                               distances_best=distancesPoints, den_mix_best=den_mix_best, bestMix_aicc=bestMix_aicc,
                               bestEst_maxComp=bestEst_maxComp, maxIter_Filter=5, bestMix_compNum=bestMix_compNum,
                               maxComp_Ob=maxComp_Ob, minComp_Ob=minComp_Ob)

    if request.method == 'POST':
        print('enter run_results POST')

        if requestArgs.get("code") == 'filterProcess_idx':
            print('enter run_results POST filterProcess_idx')
            obIdx = int(requestArgs.get("obId"))
            numOfComp = int(requestArgs.get("numOfComp"))
            runnerId = int(requestArgs.get("runnerId"))
            compIdx = int(requestArgs.get("filterProcess_idx"))
            wClusterEstimator, clsName = getWebEstimatorClass(runnerId)
            # ---------------------
            tOb = tObserver(app.config['MySqlConn'])
            tRunner = tIniceRunner(app.config['MySqlConn'])
            tRunner.readRow(runnerId)
            gedfw = wClusterEstimator(app.config['MySqlConn'])
            ds = Dataset.fromDB(app.config['MySqlConn'], tRunner.datasetId)

            sessionObDetail = tOb.getObserverDetailBySession(runnerId)

            # Map the 0-based observer index from the page to a DB observer id.
            obIds = []
            for row in sessionObDetail:
                obIds.append(row[0])
            obId = obIds[obIdx]

            tEstId = gedfw.getEstIdBasedObAndCompnum(obId, numOfComp)
            estProbMat = gedfw.getProbMat(tEstId)

            # Hard-assign each point to its most probable mixture component and
            # keep the points of the requested component.
            if len(estProbMat.shape) == 1:
                estProbMat = estProbMat[:, np.newaxis]
            label = np.argmax(estProbMat, axis=1)
            compPointIds = np.where(label == compIdx)[0]
            compPointIds = compPointIds.tolist()

            tFilter = tHighDensityFilter(app.config['MySqlConn'])
            # ---------------------

            # filter process
            dataset_array = ds.t.m_dataset

            if ds.getNumOfAttributes() > 2:
                dataset_array = DimensionReduction.decomposition_MutliVari(True, 2, dataset_array)

            compDataset_array = dataset_array[compPointIds, :]
            filterDetail = tFilter.getFilterDetailByEst(clsName, tEstId, compIdx)
            filterProcess_name = []
            filterProcess_ids = []
            for row in filterDetail:
                filterProcess_name.append(row[4])
                filterProcess_ids.append(json.loads(row[3])['ids'])

            # One bounding ellipse per filtering step, from the surviving points.
            ellData = [GetEllipse.get(dataset_array[item, :]).tolist() for item in filterProcess_ids]
            ellData_test = [dataset_array[item, :].tolist() for item in filterProcess_ids]

            treeDict = createTreeDict(app.config['MySqlConn'], clusterEstimatorId=tEstId,
                                      clusterEstimatorClassName=clsName, compId=compIdx, start=compPointIds)
            return jsonify({'dataset_test': compDataset_array.tolist(), 'filterProcess': filterProcess_name,
                            'ellData': ellData, 'treeData': treeDict, 'ellData_test': ellData_test,
                            'dataset': dataset_array.tolist()})

        if requestArgs.get("code") == 'obId':
            print('enter run_results POST obId')
            obIdx = int(requestArgs.get("obId"))
            numOfComp = int(requestArgs.get("numOfComp"))
            runnerId = int(requestArgs.get("runnerId"))
            wClusterEstimator, clsName = getWebEstimatorClass(runnerId)

            tOb = tObserver(app.config['MySqlConn'])
            tRunner = tIniceRunner(app.config['MySqlConn'])
            tRunner.readRow(runnerId)
            gedfw = wClusterEstimator(app.config['MySqlConn'])
            ds = Dataset.fromDB(app.config['MySqlConn'], tRunner.datasetId)

            sessionObDetail = tOb.getObserverDetailBySession(runnerId)

            obIds = []
            for row in sessionObDetail:
                obIds.append(row[0])
            obId = obIds[obIdx]

            observer = json.loads(sessionObDetail[obIdx][4])['vp']

            tEstId = gedfw.getEstIdBasedObAndCompnum(obId, numOfComp)
            # +++++++++++++++++++++++++++++ Filtering process of observer b, number of mixture components FIG ONE

            # KDE of the observer's distances; bandwidth heuristic is
            # 10 x the mean spacing of the distance range.
            tOb.readRow(obId)
            distances = tOb.m_distances
            bd = (max(distances)-min(distances)) / len(distances) * 10
            st = np.linspace(min(distances), max(distances), 1000)
            kde = KernelDensity(bandwidth=bd).fit(np.array(distances)[:, np.newaxis])
            density = np.exp(kde.score_samples(st[:, np.newaxis]))
            density = [[i, j] for i, j in zip(st, density)]

            minima = min(distances)
            maxima = max(distances)
            distances = [[i, 0] for i in distances]
            st = np.linspace(minima, maxima, 1000)

            den_mix, den_mix_sum = gedfw.getDen(tEstId, numOfComp, st)

            if ds.getNumOfAttributes() > 2:

                dataset = ds.t.m_dataset
                trueCenters = ds.getTrueCentres()
                observer = np.array(observer)
                observer = observer[np.newaxis, :]

                dataset, trueCenters, observer = DimensionReduction.decomposition_MutliVari(False, 2, dataset, trueCenters, observer)
                observer = np.squeeze(observer)
            else:
                dataset = ds.t.m_dataset.tolist()
                trueCenters = ds.getTrueCentres().tolist()
            polarDataset_mix, polarCenters_mix = gedfw.getPolar(tEstId, numOfComp, dataset, trueCenters, observer)

            estAbs, estPara = gedfw.getEstAbsAndResult(tEstId)

            return jsonify({'den_mix': den_mix, 'distances': distances, 'den_mix_sum': den_mix_sum, 'density': density,
                           'obId': obId, 'numOfComp': numOfComp, 'polarDataset_mix': polarDataset_mix,
                            'polarCenters_mix': polarCenters_mix, 'estAbs': estAbs, 'estPara': estPara})

        # RECEIVE SUBMIT ==================================================================
        # NOTE(review): checkAdmin()'s redirect return value is discarded, so
        # it does not actually block non-admins -- TODO confirm intent.
        checkAdmin()
        print('_______________________ i-Nice Running ..., Return results later.. ___________________')

        form = request.form
        configName = form['configName']
        dsId = int(form['dataset'])
        obGen = form['obGen']
        numOfOb = int(form['numOfOb'])
        clusterEstimatorClassName = form['estimator1']
        one2oneClasName = form['p2pDis1']
        n2nClassName = form['n2nDis1']
        filterCfgJSONDict = {'KdeFilter': {'topPer': 0.9}, 'DensityPeakFilter': {'cutoffPer': 0.1}}

        # call iniceRunner
        runner = INiceRunner()
        runner.setConfig(sqlConn=app.config['MySqlConn'], dsId=dsId, name=configName,
                         observerGenClassName=obGen, nObservers=numOfOb,
                         clusterEstimatorClassName=clusterEstimatorClassName,
                         one2oneClasName=one2oneClasName, n2nClassName=n2nClassName,
                         maxob=20, minob=1,
                         filterCfgJSONDict=filterCfgJSONDict, mixedParamsCfgJSONDict=None)
        status = runner.run()
        if status:
            print('_______________________ i-Nice run successfully ..., Return results now  ___________________')
            return redirect('run_results?id=' + str(runner.dbId))
        print('_______________________ i-Nice run fail ___________________')
        return {}


def createTreeDict(sqlconn, clusterEstimatorId, clusterEstimatorClassName, compId, start):
    """Build the filter-process tree for one mixture component.

    Loads the filter root matching *compId* for the given estimator, reads its
    full subtree, and returns a nested dict (via DFS) suitable for the tree
    chart; {} when no matching root exists.
    """
    filterTable = tHighDensityFilter(sqlconn)
    rootList = filterTable.getRootsFrom(clusterEstimatorId, clusterEstimatorClassName)
    if not rootList:
        return {}
    matchingRoot = next((r for r in rootList if r.whichComponent == compId), None)
    if matchingRoot is None:
        return {}
    filterTable.readTree(matchingRoot.id)
    treeDict = {'name': start, 'value': []}
    DFS([filterTable], treeDict)
    return treeDict


def DFS(childrenList, treeDict):
    """Recursively attach a 'children' list of node dicts to *treeDict*.

    Each child node contributes {'name': outputIds, 'value': [filterClassName,
    filterParams]} plus its own 'children' entry (leaf nodes, whose child list
    is None, get no 'children' key).
    """
    if childrenList is None:
        return
    nodes = []
    for node in childrenList:
        entry = {'name': node.outputIds, 'value': [node.filterClassName, node.filterParams]}
        DFS(node.m_childList, entry)
        nodes.append(entry)
    treeDict['children'] = nodes


@ac.route('/login', methods=['GET', 'POST'])
def login():
    """Serve the login form (GET) and authenticate credentials (POST)."""
    print('--------enter login---------: ', request.method)

    if request.method == 'GET':
        return render_template('login.html')

    if request.method == 'POST':
        name = request.form['user']
        password = request.form['pwd']

        userTable = tUser(app.config['MySqlConn'])
        userTable.createTable()
        if not userTable.isUserExist(name, password):
            flash("Login Fail")
            return render_template('login.html')

        session['user'] = name
        flash("Login Succesfully")
        return redirect('running_config')


@ac.route('/')
def index():
    """Root URL: send every visitor straight to the login page."""
    return redirect('/login')


@ac.route('/logout')
def logout():
    """Log the current user out and return to the login page.

    Uses session.pop so an anonymous visitor hitting /logout no longer
    triggers a KeyError (the original `del session['user']` did).
    """
    session.pop('user', None)
    return redirect('/login')


def getWebEstimatorClass(runnerid):
    """Resolve the web-side estimator wrapper class for a run session.

    Returns (wrapper class, estimator class name). The wrapper lives in a
    module named 'w<EstimatorName>' containing a class of the same name.
    """
    obTable = tObserver(app.config['MySqlConn'])
    configRows = obTable.getObConfigDetail(runnerid)
    # TODO (inherited): the estimator type of the first observer is assumed
    # to apply to the whole session.
    estimatorName = configRows[0][2]
    wrapperName = 'w' + estimatorName
    return MyTools.getClass(wrapperName + '.' + wrapperName), estimatorName


def checkAdmin():
    """Best-effort admin gate: admin usernames are prefixed 'adm_'.

    Uses session.get so a missing 'user' key no longer raises KeyError
    (the original subscript crashed for anonymous requests).

    NOTE(review): callers (run, run_results POST) discard the returned
    redirect, so non-admins are only warned, not actually blocked; consider
    flask.abort(403) here if enforcement is intended -- TODO confirm.
    """
    if session.get('user', '').split('_')[0] != 'adm':
        print('Guest mode, unable to start running task')
        return redirect('login')