'''
Author: devis.dong
Email: devis.dong@gmail.com
Date: 2022-10-19 19:21:15
LastEditTime: 2023-01-04 17:41:54
LastEditors: devis.dong
FilePath: \server\sqltb.py
Description:
'''
from sqldb import *
# import importlib
from pointsiab import *
import ipfshttpclient
from mydefine import *

import json
import multiaddr.codecs.idna
import multiaddr.codecs.uint16be
import multiaddr.codecs.ip4


def loadModel(modelname:str, ckpt:str):
    """Build the feature network, restore weights from ``<ckpt>.pth``, and
    return the model in eval mode on the CPU.

    ``modelname`` is currently unused (a dynamic-import path was planned but
    is commented out); the network comes from ``getModel()``.
    """
    model = getModel()

    # Restore the checkpoint and log the training statistics stored with it.
    epoch, accu_all, accu_pos, accu_neg, accu_cls = load_checkpoint(model, "%s.pth" % ckpt)
    logger.info('starting from epoch %d, accu_all %.4f, accu_pos %.4f, accu_neg %.4f, accu_cls %.4f' % (epoch, accu_all, accu_pos, accu_neg, accu_cls))

    # Inference only: pin to CPU and switch off train-mode layers.
    model.to('cpu') # TODO: CUDA
    return model.eval()

# # Load the network model
# fnet = loadModel("pointsia-b", "pointsia-b_best_neg")

# db_test3d = DBMysql(host="localhost", user="root", passwd="123456", database="test3d")

class TBClass():
    """Base for table wrappers: records the table name and the column names
    derived from a prototype row object's instance attributes."""

    def __init__(self, name:str, tbrow) -> None:
        # Table name, e.g. 'tb_model'.
        self.name: str = name
        # Column names in the prototype row's attribute-definition order.
        self.cols = [col for col in vars(tbrow)]

class TBModel(TBClass):
    """Wrapper around the ``tb_model`` table: stores 3D-model metadata plus an
    IPFS content hash, and supports similarity / label / type searches.

    NOTE(review): several SQL statements are built with %-formatting from
    caller-supplied values (category, label) and are SQL-injectable if those
    values ever come from untrusted input.  ``self.db.sqlexe``'s placeholder
    contract is not visible here, so they are flagged rather than rewritten;
    confirm support and switch to ``?`` parameters.
    """

    class TableRow():
        """Prototype row; attribute order defines the column order used by
        ``insert`` (an ``id`` value is prepended there)."""

        def __init__(self, category='', name='', type='', tags='', description='', feature=None, data=None, hash='', extend='', trans_model='', trans_camera='') -> None:
            self.category:str=category
            self.name:str=name
            self.type:str=type
            self.tags:str=tags
            self.description:str=description          # scene name, mirrored into tb_sc
            self.feature:str=feature                  # serialized feature tensor (see str2tensor)
            self.data:str=data
            self.hash:str=hash                        # local file path before insert(); IPFS hash after
            self.extend:str=extend                    # file extension used for the local cache name
            self.trans_model:str=trans_model          # JSON-encoded transform
            self.trans_camera:str=trans_camera        # JSON-encoded transform

    def __init__(self, database, feature_net, ipfs_url) -> None:
        """
        :param database: handle exposing ``sqlexe(sql[, rows])`` returning
            ``(status, results)`` with ``status is None`` on success.
        :param feature_net: network exposing ``feature`` and ``fsimilarity``.
        :param ipfs_url: multiaddr of the IPFS API endpoint.
        """
        super().__init__('tb_model', self.TableRow())
        self.fnet = feature_net
        self.db = database
        self.ipfs_url = ipfs_url

    def clear(self):
        """Delete every row of the table."""
        self.db.sqlexe('TRUNCATE TABLE %s' % self.name)

    def _connect_ipfs(self):
        """Connect to IPFS; return ``(client, None)`` on success or
        ``(None, error string)`` — mirrors the sqlexe status convention."""
        try:
            return ipfshttpclient.connect(self.ipfs_url), None
        except Exception as e:
            logger.error(e)
            return None, str(e)

    def _cache_from_ipfs(self, client, file_hash, extend):
        """Ensure ``cache/<hash>.<extend>`` exists locally, fetching the
        content from IPFS on a miss; return the absolute cache path."""
        fcache = os.path.abspath(f'cache/{file_hash}.{extend}')
        if not os.path.exists(fcache):
            content = client.cat(file_hash)
            with open(fcache, 'wb') as cfile:
                cfile.write(content)
                cfile.flush()
        return fcache

    def insert(self, rows:list):
        """Upload each row's file to IPFS, insert the rows into ``tb_model``
        with sequential ids continued from ``tb_info.count1``, and register
        any new scene names (``row.description``) in ``tb_sc``.

        Returns ``None`` on success, otherwise the DB/IPFS error.  ``rows``
        are mutated in place: ``row.hash`` is replaced by the IPFS hash.
        """
        if not rows:
            # Nothing to upload; trivially successful (avoids IndexError at
            # rows[0] below).
            return None

        results_sql = []
        print(f'rows: {rows}')

        # Refresh the cached row count for this asset type.
        sql_updateCount = "UPDATE tb_info SET count1 = (SELECT COUNT(*) FROM tb_model) where assetType==%s" % (ASSET_MODEL)
        status, _ = self.db.sqlexe(sql_updateCount)
        if status is not None:
            return status

        # BUGFIX: check status BEFORE dereferencing the result set — the
        # original indexed into the result first and could crash instead of
        # reporting the DB error.
        status, count_rows = self.db.sqlexe("SELECT count1 FROM tb_info WHERE assetType==%s" % (ASSET_MODEL))
        if status is not None:
            return status
        next_id = count_rows[0][0] + 1

        client, err = self._connect_ipfs()
        if err is not None:
            return err

        cur_scenes = set()
        for row in rows:
            cur_scenes.add(row.description)
            row_dict = row.__dict__
            # ``hash`` initially holds a local file path; client.add uploads
            # that file and we keep the returned IPFS content hash instead.
            res = client.add(row_dict['hash'])
            row_dict['hash'] = res['Hash']
            results_sql.append([next_id] + list(row_dict.values()))
            next_id += 1

        sql_insert2model = "INSERT INTO tb_model(id, category, name, type, tags, description, feature, data, hash, extend, trans_model, trans_camera) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);"
        status, _ = self.db.sqlexe(sql_insert2model, results_sql)
        if status is not None:
            return status

        # Register scene names not yet present in tb_sc for this category.
        category = rows[0].category
        status, scs = self.db.sqlexe(f'select name from tb_sc where category=="{category}"')
        if status is not None:
            return status

        old_scenes = {s[0] for s in scs}
        new_scenes = list(cur_scenes - old_scenes)
        print(f'old {old_scenes}, new: {new_scenes}')
        sql_insert2sc = f'insert into tb_sc (name, category) VALUES (?, "{category}");'
        status, _ = self.db.sqlexe(sql_insert2sc, [[s] for s in new_scenes])
        if status is not None:
            return status

        # Refresh the count again now that the rows are committed.
        status, _ = self.db.sqlexe(sql_updateCount)
        return status

    def search_by_feature(self, points:torch.tensor, category=None):
        '''Rank stored models by feature similarity to ``points`` and return
        ``(status, models)`` with at most the 10 most similar rows.

        Do not set categorical values when searching by file in internal
        interfaces.
        '''
        f0 = self.fnet.feature(points) # TODO: CUDA
        print('searching by feature ...')
        if category is not None:
            status, rows = self.db.sqlexe('select * from %s where category = "%s"' % (self.name, category))
        else:
            status, rows = self.db.sqlexe('select * from %s' % (self.name))
        models = {"titles": ['id', 'name', 'type', 'tags', 'hash', 'trans_model', 'trans_camera', 'similarity'], "values": []}
        if status is not None:
            # Report the DB error instead of iterating an invalid result set.
            return status, models
        client, err = self._connect_ipfs()
        if err is not None:
            return err, {}
        scored = []
        for (row_id, _cat, name, mtype, tags, _desc, feature, _data, file_hash, extend, trans_model, trans_camera) in rows:
            f1 = str2tensor(feature) # TODO: CUDA
            similarity = self.fnet.fsimilarity(f0, f1).item()
            fcache = self._cache_from_ipfs(client, file_hash, extend)
            scored.append((similarity, [("%d" % row_id), name, mtype, tags, fcache, json.loads(trans_model), json.loads(trans_camera)]))
        # BUGFIX: sort on the float similarity, not on its "%.4f" string —
        # lexicographic comparison mis-orders negatives and values >= 10.
        scored.sort(reverse=True, key=lambda e: e[0])
        # Keep only the top 10 results, formatting the similarity last.
        models["values"] = [fields + ["%.4f" % sim] for sim, fields in scored[:10]]
        print('search by feature done.')
        return status, models

    def _search_exact(self, column, value, category):
        """Shared body of search_by_label / search_by_type: exact match on one
        column within a category, caching each hit's file from IPFS."""
        status, rows = self.db.sqlexe('select * from tb_model where %s = "%s" and category = "%s"' % (column, value, category))
        models = {"titles": ['id', 'name', 'type', 'tags', 'hash', 'trans_model', 'trans_camera'], "values": []}
        if status is not None:
            return status, models
        client, err = self._connect_ipfs()
        if err is not None:
            return err, {}
        for (row_id, _cat, name, mtype, tags, _desc, _feature, _data, file_hash, extend, trans_model, trans_camera) in rows:
            fcache = self._cache_from_ipfs(client, file_hash, extend)
            models["values"].append([("%d" % row_id), name, mtype, tags, fcache, json.loads(trans_model), json.loads(trans_camera)])
        return status, models

    def search_by_label(self, label, category):
        """Exact-match search on the ``tags`` column within ``category``."""
        print('searching by label ...')
        result = self._search_exact('tags', label, category)
        print('search by label done.')
        return result

    def search_by_type(self, label, category):
        """Exact-match search on the ``type`` column within ``category``."""
        print('searching by type ...')
        result = self._search_exact('type', label, category)
        print('search by type done.')
        return result

    def upload_models(self, models:list):
        """Insert the given TableRow instances; returns the insert status."""
        return self.insert(models)

class TBUser(TBClass):
    """Wrapper for the ``tb_user`` table (name / password / email columns)."""

    class TableRow():
        # NOTE(review): the parameter names (category/tags/feature/...) look
        # copy-pasted from TBModel.TableRow while the attributes are user
        # fields; renaming them could break keyword callers, so they are kept
        # as-is — confirm the intended mapping before cleaning up.
        def __init__(self, category='', tags='', feature='', description='', filepath='') -> None:
            # Attribute order defines the column order derived by TBClass.
            self.name: str = category
            self.password: str = tags
            self.email: str = feature

    def __init__(self) -> None:
        super().__init__('tb_user', self.TableRow())
