# -*- coding: utf-8 -*-

from . import DbBase
from . import DmConstant
from . import DbConfig
import traceback
import time
import os
import sys

from ATS.util import constant
import copy
from utils.log_util import loggings as logger

# Make the parent of the launch directory importable so sibling packages resolve.
start_dir = os.getcwd()
sys.path.insert(0, os.path.dirname(start_dir))
# Module-level cache: business name -> pkid of its row in the BUSINESS table.
# Filled by DbImpl.__register_business, read by the register_* methods.
BUSINESS_IDS = {}


def is_db_server_done():
    """Return True when the DB-server flag file ``ats.ats`` exists in %TMP%."""
    flag_file = os.path.join(os.environ["TMP"], "ats.ats")
    return os.path.exists(flag_file)


def on_project_started():
    """Remove the ``ats.ats`` flag file from %TMP% at project start, if present.

    Best-effort: a missing TMP variable or an unremovable file is ignored,
    preserving the original silent behaviour but without a bare ``except``
    that would also hide unrelated programming errors.
    """
    try:
        flag_file = os.path.join(os.environ["TMP"], "ats.ats")
        if os.path.exists(flag_file):
            os.remove(flag_file)
    except (KeyError, OSError):
        # KeyError: TMP not set; OSError: file vanished / permission denied.
        pass


def on_project_stoped():
    """Write the stop marker ``v=-1`` into %TMP%/ats.ats."""
    # os.environ maps an environment-variable name string to its value.
    marker = os.path.join(os.environ["TMP"], "ats.ats")
    with open(marker, 'w') as handle:
        handle.write("v=-1")


def on_project_going(value):
    """Write progress *value* into %TMP%/ats.ats1 as an INI-style section."""
    try:
        progress_file = os.path.join(os.environ["TMP"], "ats.ats1")
        with open(progress_file, 'w') as handle:
            handle.write("[COMMON]\n")
            handle.write("v=%d" % value)
    except:
        logger.error("Error while on_project_going: %s" % traceback.format_exc())


def fetch_data_con_by_file(db_file):
    """Open *db_file* and report whether it already existed.

    Returns ``(existed, connection)`` where *connection* is a fresh
    ``DbBase.DbBase`` bound to *db_file*.
    """
    existed = os.path.exists(db_file)
    return existed, DbBase.DbBase(db_file)


def fetch_data_con_by(ppf):
    """Resolve *ppf* (a ``XXX.ppf`` name) to its project DB file and open it.

    Falls back to ``DmConstant.DEFAULT_DB`` when *ppf* is None and creates the
    project directory under ``constant.BASE_DIR`` on demand.  Returns
    ``(existed, connection)``.
    """
    logger.debug("Fetch by ppf: %s!" % ppf)
    if ppf is None:
        ppf = DmConstant.DEFAULT_DB
    stem = os.path.splitext(ppf)[0]
    logger.debug("result of Fetch by ppf: %s!" % stem)
    project_dir = os.path.join(constant.BASE_DIR, stem)
    logger.debug("prj_dir of Fetch by ppf: %s!" % project_dir)
    if not os.path.exists(project_dir):
        os.makedirs(project_dir)
    db_file = os.path.join(project_dir, "%s%s" % (stem, constant.DB_FILE_POST))
    logger.debug("db_file of Fetch by ppf: %s!" % db_file)
    existed = os.path.exists(db_file)
    return existed, DbBase.DbBase(db_file)


def fetch_data_con():
    """Open the DB connection for the default single-test-plan project."""
    stem = os.path.splitext('ATS_SingleTestPlan.ppf')[0]
    project_dir = os.path.join(constant.BASE_DIR, stem)
    if not os.path.exists(project_dir):
        os.makedirs(project_dir)
    db_file = os.path.join(project_dir, "%s%s" % (stem, constant.DB_FILE_POST))
    return DbBase.DbBase(db_file)


def fetch_data_model():
    """Return a fresh, connection-less :class:`DbImpl` data model."""
    model = DbImpl()
    return model


class DbImpl:
    def __init__(self, db_file=None, i_lev=None):
        self.file = db_file
        if db_file:
            flag = True
            if not os.path.exists(db_file):
                flag = False
            self.db = DbBase.DbBase(db_file, i_lev)
            if not flag:
                self.build_base_tables(True)
        else:
            self.db = None
        self.prj_run_id = -1
        self.test_type = "GeneralTest"  # temporary use this. should be dynamically fetch.
        self.test_table_map = {}
        self.data_table_model = {}  # for columns for output data table

    def copyNew(self, db_model):
        self.test_table_map = db_model.test_table_map
        self.data_table_model = db_model.data_table_model
        self.prj_run_id = db_model.prj_run_id

    def set_connection(self, con):
        try:
            self.db = con
            # self.db.close()
        except:
            pass

    def release_con(self):
        if self.db:
            self.db.close()

    def register_test(self, test, test_type):
        """Insert the (*test_type*, *test*) pair into the test registry table.

        A *test* name already bound to a different type is reported and left
        untouched; an identical pair is silently accepted.
        """
        registry = DbConfig.table_models[8]
        cols = registry.columns
        by_name = {cols[1]: test}
        if not self.db.is_row_exist(registry.name, by_name):
            row = {cols[0]: test_type, cols[1]: test}
            try:
                self.db.insert_row(registry.name, commit_flag=True, **row)
            except:
                logger.error("Error while registering test %s with test type %s!" % (test, test_type))
            return
        by_name[cols[0]] = test_type
        if not self.db.is_row_exist(registry.name, by_name):
            logger.error("Name %s is in used, cannot be registered with test type %s!" % (test, test_type))

    def register_output_models(self, test_dict):
        """Register every test in *test_dict* together with its output model.

        *test_dict* maps test name -> list of output field names, e.g.
        ``{"test1": ['V_Pos', 'I_Pos', 'vth_T2'], "test2": ['I_Gate']}``.
        (Should carry test type information; currently not implemented.)
        Returns the mapping of test name -> output data table name, or None
        on empty input.
        """
        logger.debug('test_dict to register into output models:' + str(test_dict))
        if not test_dict:
            logger.error("Fault test information in SSPF file.")
            return None
        registered = {}
        try:
            for test_name, fields in test_dict.items():
                self.register_test(test_name, self.test_type)
                # Normalize field names in place (callers may hold the list).
                for idx, field in enumerate(fields):
                    fields[idx] = field.strip()
                model = [fields, ['SDATA'] * len(fields)]
                table_name = self.__register_output_model(self.test_type, model)
                registered[test_name] = table_name
                self.data_table_model[table_name] = fields
            self.test_table_map.update(registered)
            self.register_table_maps()
            logger.debug('Source test_table_map in register output models:' + str(self.test_table_map))
            return registered
        except:
            logger.error("Error occured while registering output models:\n%s" % traceback.format_exc())

    def __register_output_model(self, testType_name, output_model):
        """Ensure an output-model row and its data table exist for *testType_name*.

        output_model: a 2-element list, e.g.
        ``[['I_Gate', 'V_Gate'], ['float', 'float']]`` — field names first,
        then their storage types.  Returns the per-model output data table
        name ``<TEST_TYPE>_<pkid>_OUTPUT`` (or None for a malformed model).
        """
        if not isinstance(output_model, list):
            logger.error("Invalid output model.")
            return
        output_list = output_model[0]
        sql = "select pkid,OUTPUT_FIELDS from TEST_OUTPUT_MODEL where TEST_TYPE=?"
        b = (testType_name,)
        result = self.db.query(sql, -1, b)
        find_one = False
        # Look for an existing model row whose field set matches exactly
        # (same length, every field present — order-insensitive).
        target_pkid = -1
        if len(result) > 0:
            for result_ele in result:
                existed = result_ele[1]
                if len(output_list) != len(existed):
                    continue
                flag = True
                for item in output_list:
                    if existed.count(item) < 1:
                        flag = False
                        break
                if not flag:
                    continue
                find_one = True
                target_pkid = result_ele[0]
                break

        if not find_one:
            tab_obj = DbConfig.table_models[7]
            col_values = {tab_obj.columns[0]: testType_name, tab_obj.columns[1]: DbBase.SeqData(output_list),
                          tab_obj.columns[2]: DbBase.SeqData(output_model[1])}
            target_pkid = self.db.insert_row(tab_obj.name, True, **col_values)
        data_table_name = "%s_%d_OUTPUT" % (testType_name, target_pkid)
        if not self.db.is_table_exist(data_table_name):
            col_def = {'PROJECT_RUN_ID': 'INTEGER', 'GLOBAL_LOCATION_ID': 'INTEGER', 'TEST_NAME': 'TEXT',
                       'TIMESTAMP': 'DATETIMEDATA'}
            if constant.ADD_BIN_SORT == 1:
                col_def['BIN_SORT'] = 'SEQDATA'
            if constant.CUSTOMIZE_FOR_LOOP == 1:
                col_def['LOOP_ID'] = 'INTEGER'
            for i, ele in enumerate(output_list):
                col_def[ele] = output_model[1][i]
            self.db.create_table(data_table_name, False, **col_def)
            logger.info("Success register new output data table: %s" % data_table_name)
        else:
            # BUG FIX: this message used to be logged unconditionally, claiming
            # the table was "already registered" even right after creating it.
            logger.info("Output data table already registered. Return directly.")
        return data_table_name

    def register_table_maps(self):
        """Persist every (test -> output table) pair held by this instance."""
        logger.debug('Final test_table_map to be registed into db:' + str(self.test_table_map))
        if not self.test_table_map:
            logger.error("No valid test table map in data model instance!")
            return
        for test_name, table_name in self.test_table_map.items():
            self.__register_table_map(self.prj_run_id, self.test_type, test_name, table_name)

    def __register_table_map(self, pro_run_id, test_type, test_name, table_name):
        """Insert one test/table map row unless an identical one already exists."""
        tab_obj = DbConfig.table_models[6]
        cols = tab_obj.columns
        row = {cols[0]: pro_run_id, cols[1]: test_type, cols[2]: test_name, cols[3]: table_name}
        try:
            already = self.db.is_row_exist(tab_obj.name, row)
            logger.debug("test_map exists:" + str(already) + "\n")
            if not already:
                self.db.insert_row(tab_obj.name, True, **row)
        except:
            logger.error("Error occured while registering table map:\n%s" % traceback.format_exc())

    def __register_business(self):
        """Register every configured BUSINESS item and (re)build its global table.

        Reads the BUSINESS/ITEMS setting, upserts one row per business into
        the BUSINESS table, caches each row's pkid in the module-level
        BUSINESS_IDS map, and creates (or backs up and recreates, when the
        model changed) the matching GLOBAL_* table.

        Returns: list of global table names created.
        Raises: Exception when the BUSINESS table or configuration is missing.
        """
        built_tables = []
        col_defs = DbConfig.table_models[0].columns
        tab_name = DbConfig.table_models[0].name
        if not self.db.is_table_exist(tab_name):
            raise Exception("BUSINESS table is not created!")

        busineses = DbConfig.get_setting_value('BUSINESS', 'ITEMS')
        if not busineses:
            raise Exception("No valid BUSINESS configuration!")

        b_list = busineses.split(',')
        for b in b_list:
            # BUG FIX: was a stray debug print(); route through the logger.
            logger.debug("Registering BUSINESS item: %s" % b)
            b_value = DbConfig.get_setting_value(tab_name, b)
            if not b_value:
                logger.error("No valid configuration for BUSINESS %s" % b)
                continue
            # Each entry is "field:type"; split into ordered fields + type map.
            b_v_list = b_value.split(',')
            insert_values = {}
            fields = []
            type_dict = {}
            for b_v in b_v_list:
                field, typ = b_v.split(':')
                fields.append(field)
                type_dict[field] = typ
            insert_values[col_defs[0]] = b
            insert_values[col_defs[1]] = DbBase.SeqData(fields)
            insert_values[col_defs[2]] = DbBase.SeqData(type_dict)
            base_para = {col_defs[0]: b}
            sql = "select pkid from %s where %s=?" % (tab_name, col_defs[0])
            flag = 0
            try:
                if self.db.is_row_exist(tab_name, insert_values):
                    # Identical row already registered: reuse it, keep the table.
                    res = self.db.query(sql, 1, (b,))
                    business_pkid = res[0]
                    flag = 1  # no need to create business model table
                elif self.db.is_row_exist(tab_name, base_para):
                    # Same business, changed model: update the row; the global
                    # table is backed up and rebuilt below.
                    updates = {col_defs[1]: DbBase.SeqData(fields), col_defs[2]: DbBase.SeqData(type_dict)}
                    self.db.update_tabl(tab_name, base_para, updates)

                    res = self.db.query(sql, 1, (b,))
                    business_pkid = res[0]
                    flag = 2  # Need to drop the old one, then create a new business table.
                    # Meanwhile need to backup the old one with another table name.
                else:
                    business_pkid = self.db.insert_row(tab_name, True, **insert_values)
                    flag = 3  # drop and create new one.
                global BUSINESS_IDS
                BUSINESS_IDS[b] = business_pkid
                logger.debug("After register business: %s" % str(BUSINESS_IDS))
            except:
                logger.error("Error occured while inserting row for BUSINESS:\n%s" % traceback.format_exc())
                continue
            try:
                global_tab_name = "%s%d" % (DmConstant.GLOBAL_TABLE_NAME_PREFIX, business_pkid)
                if flag == 2:
                    # Preserve the old data under a timestamp-suffixed name.
                    t_stamp = time.strftime(DmConstant.TIMESTAMP_SIM, time.localtime())
                    u_sql = "alter table %s rename to %s_%s" % (global_tab_name, global_tab_name, t_stamp)
                    self.db.exec_sql(u_sql)
                if flag in [2, 3]:
                    self.db.create_table(global_tab_name, **type_dict)
                    built_tables.append(global_tab_name)
            except:
                logger.error(
                    "Error occured while create global table for business %s:\n%s" % (b, traceback.format_exc()))
                continue
        return built_tables

    def register_test_task(self, test_name, location):
        """Insert one global-location row for *test_name* (a ``*.py`` file name).

        *location* supplies one value per business-model column, in order.
        Returns the new row's pkid, or None on error/invalid input.
        """
        if not test_name or not location:
            # BUG FIX: message used to say "test time" instead of "test name".
            logger.error("Invalid value on test name or location.")
            return
        if not test_name.endswith('.py'):
            logger.error("Not support postfix of file:%s" % test_name)
            return
        # BUG FIX: split('.')[0] truncated names containing dots
        # (e.g. "a.b.py" -> "a"); strip only the trailing ".py".
        test_name = test_name[:-3]
        paras = {}

        # ----------TODO------------------
        # Should assign business and testype to testtask in sspf file.
        # currently get the first business as default.
        busi = DbConfig.businesses[0]
        cols = DbConfig.businesses[busi]
        for i, col in enumerate(cols):
            paras[col] = location[i]
        paras[DmConstant.GLOABAL_TEST_NAME_KEY] = test_name
        global BUSINESS_IDS
        pkid = BUSINESS_IDS[busi]
        table_name = "%s%d" % (DmConstant.GLOBAL_TABLE_NAME_PREFIX, pkid)
        try:
            return self.db.insert_row(table_name, commit_flag=True, **paras)
        except:
            logger.error("Error while inserting global location into table:\n%s" % traceback.format_exc())

    def register_globation_location(self, location_dict):
        """Register each location tuple; return ``{INDEX_LOC_MAP: {index: loc_pkid}}``.

        *location_dict* maps an index to a location tuple, e.g.
        ``{1: (1, 'WafSite1', (0, 3), 'SS1', 'dv1', 'ptm5')}``.  Rows that
        already exist are reused; new ones are inserted.  Returns None when
        *location_dict* is empty.
        """
        if not location_dict:
            logger.error("Empty location dict.")
            return
        index_locid_map = {}
        # TODO
        # should be kept with transition to roll back when exception occured.

        logger.debug("location_dict to register into db:" + str(location_dict) + "\n")
        for index in location_dict:
            loc = location_dict.get(index)
            logger.debug("each LOC :" + str(loc) + "\n")
            paras = {}

            # ----------TODO------------------
            # Should assign business and testype to testtask in sspf file.
            # currently get the first business as default.
            busi = DbConfig.businesses[0]
            cols = DbConfig.businesses_model[busi][0]
            for i, col in enumerate(cols):
                if i < len(loc):
                    paras[col] = str(loc[i])
            global BUSINESS_IDS
            logger.debug("Before get business: %s" % str(BUSINESS_IDS))
            pkid = BUSINESS_IDS[busi]
            table_name = "%s%d" % (DmConstant.GLOBAL_TABLE_NAME_PREFIX, pkid)
            try:
                # BUG FIX: removed a dead, malformed SQL fragment
                # ("select pkid form %s where ") that was never used; also
                # renamed the returned pkid so it no longer shadows the
                # business pkid above.
                resu, existing_pkid = self.db.is_row_exist_pkid(table_name, paras)

                lo = "##############resu,pkid:" + str(resu) + "||" + str(existing_pkid) + "\n" \
                     + "##############resu:" + str(resu) + "\n"
                logger.debug(lo)
                if resu:
                    index_locid_map[index] = existing_pkid
                    continue
                loc_id = self.db.insert_row(table_name, commit_flag=True, **paras)

                logger.debug("loc_id returned from insert location into db:" + str(loc_id) + "\n")

                index_locid_map[index] = loc_id
            except:
                logger.error("Error while inserting global location into table:\n%s" % traceback.format_exc())
                break
        return {constant.INDEX_LOC_MAP: index_locid_map}

    def build_base_tables(self, keep_existing=False):
        """Create every configured base table, then register businesses and
        test types.

        keep_existing: when True, tables that already exist are left alone.
        """
        created = []
        table_defs = DbConfig.get_table_dict()
        for tab in DbConfig.get_table_sequence():
            if tab not in table_defs:
                continue
            if keep_existing and self.db.is_table_exist(tab):
                continue
            definition = table_defs.get(tab)
            col_def = definition.get(DmConstant.COL_DEF)
            constraints = definition.get(DmConstant.CONSTRA_DEF)
            try:
                self.db.create_table(tab, False, *constraints, **col_def)
                created.append(tab)
            except:
                logger.error("Error occured while creating table %s" % tab)
                continue

        try:
            created.extend(self.__register_business())
        except:
            logger.error("Error occured while registering business:\n%s" % traceback.format_exc())

        try:
            self.__register_test_type()
        except:
            logger.error("Error occured while registering test type:\n%s" % traceback.format_exc())

        logger.info("Data base '%s' initialization finished. Created tables : %s" % (self.file, str(created)))

    def __register_test_type(self):
        """Read the configured test types per business from the system
        setting.ini and insert any that are not yet present in the
        test-type table.

        Raises: Exception on missing BUSINESS or test-type configuration.
        """
        busineses = DbConfig.get_setting_value('BUSINESS', 'ITEMS')
        if not busineses:
            raise Exception("No valid BUSINESS configuration!")

        tab_name = DbConfig.table_models[1].name
        columns = DbConfig.table_models[1].columns
        for busi in busineses.split(','):
            test_types = DbConfig.get_setting_value(busi, "test_types")
            if not test_types:
                raise Exception("No test type configured! Please add testtype in system configuration file.")
            for t_type in test_types.split(','):
                if not self.db.is_row_exist(tab_name, {columns[0]: t_type}):
                    row = {columns[0]: t_type, columns[1]: busi}
                    self.db.insert_row(tab_name, True, **row)

    def register_test_attribute(self, test_type_id, test_name, project_run_id=None, **attributes):
        """Replace the stored attributes for one test of one project run.

        attributes: e.g. ``{'V_Gate': 1.5, 'I_Gate': 3}``.  When
        *project_run_id* is falsy, the latest registered run is used.
        """
        if not project_run_id:
            project_run_id = self.db.get_latest_id(DbConfig.table_models[2].name)
        if project_run_id is None:
            logger.error("Error in register_test_attribute: No project run record found.")
            return
        map_tab = DbConfig.table_models[5]
        conditions = {map_tab.columns[0]: project_run_id, map_tab.columns[1]: test_type_id,
                      map_tab.columns[2]: test_name}
        try:
            # Clear old attributes first so the insert below is a full replace.
            self.db.delete(map_tab.name, conditions)
        except:
            logger.error(
                "Error in clearing data from test attribute for test %s\n%s" % (test_name, traceback.format_exc()))

        rows = [(project_run_id, test_type_id, test_name, key, value)
                for key, value in attributes.items()]
        try:
            self.db.insert_many(map_tab.name, map_tab.columns, rows, True)
        except:
            logger.error("Error in inserting test attribute for test %s\n%s" % (test_name, traceback.format_exc()))

    def on_project_run(self, pro_name, is_run=True, is_append=False, batch_num="", group_num="", prod_num="",
                       tech_phase=""):
        """Register a project run and remember its run id on the instance.

        is_run: True stamps the run with the current time; False registers
        the run without a timestamp.
        """
        # BUG FIX: replaced the legacy ``cond == True and 1 or 0`` idiom,
        # which only yielded 1 when is_append compared *equal* to True,
        # with a plain truthiness test.
        app_para = 1 if is_append else 0
        run_id = self.register_project_run(pro_name, is_run, app_para, bat_n=batch_num, group_n=group_num,
                                           pro_n=prod_num, tech_p=tech_phase)
        self.prj_run_id = run_id
        logger.debug('run_id:' + str(run_id))

    def update_project_run(self, pkid):
        """Overwrite the pkid of the most recently inserted project-run row."""
        statement = ("update project_run_register set pkid=%d "
                     "where rowid=(select max(rowid) from project_run_register)") % pkid
        self.db.exec_sql(statement)

    def register_project_run(self, pro_name, is_run=True, is_append=0, bat_n="", group_n="", pro_n="", tech_p=""):
        """Insert or refresh the project-run row for *pro_name*.

        is_run:  True to set timestamp.
                False to set timestamp with None value
        Returns the run pkid, or None on failure.
        """
        para = {}
        tab_name = DbConfig.table_models[2].name
        columns = DbConfig.table_models[2].columns
        para[columns[0]] = pro_name
        para[columns[1]] = ''
        time_stamp = None
        run_id = None
        if is_run:
            time_stamp = time.localtime()
        col_def = {}
        try:
            logger.debug('1234tab_name:' + str(tab_name))
            logger.debug('1234para:' + str(para))
            if not self.db.is_row_exist(tab_name, para):
                logger.debug('not self.db.is_row_exist(tab_name, para):')

                col_def[columns[0]] = pro_name
                col_def[columns[1]] = DbBase.TimeData(time_stamp)
                col_def[columns[2]] = is_append

                if bat_n:
                    col_def[columns[3]] = bat_n
                if group_n:
                    col_def[columns[4]] = group_n
                if pro_n:
                    col_def[columns[5]] = pro_n
                if tech_p:
                    col_def[columns[6]] = tech_p

                run_id = self.db.insert_row(tab_name, True, **col_def)
                logger.debug('MMMMMMMMMMMMrun_id:' + str(run_id))
            else:
                logger.debug('self.db.is_row_exist(tab_name, para)')
                sql = "select pkid from %s where %s=? and %s=?" % (tab_name, columns[0], columns[1])
                result = self.db.query(sql, 1, (pro_name, ''))
                run_id = result[0]
                if not time_stamp:
                    return run_id
                conditions = {columns[0]: pro_name, columns[1]: ''}
                updates = {columns[1]: DbBase.TimeData(time_stamp)}

                # BUG FIX: these optional fields were written into the unused
                # ``col_def`` dict and silently dropped on the update path;
                # they belong in the ``updates`` applied below.
                if bat_n:
                    updates[columns[3]] = bat_n
                if group_n:
                    updates[columns[4]] = group_n
                if pro_n:
                    updates[columns[5]] = pro_n
                if tech_p:
                    updates[columns[6]] = tech_p
                self.db.update_tabl(tab_name, conditions, updates)
            self.prj_run_id = run_id
            logger.debug('Prj_run_id after register project run:' + str(self.prj_run_id))
            return run_id
        except:
            logger.error("Error occured while registering project run:\n%s" % traceback.format_exc())
            return None

    def save_project_files(self, pro_run_id, files):
        """Persist project files as file streams linked to *pro_run_id*.

        files: e.g. ``{'WDF': "D:/aaa.wdf"}``.
        """
        if not files:
            return
        rows = [(pro_run_id, kind, DbBase.FileStreamData(path))
                for kind, path in files.items()]
        tab_obj = DbConfig.table_models[3]
        try:
            self.db.insert_many(tab_obj.name, tab_obj.columns, rows, True)
        except:
            logger.error(
                "Error while inserting project files for project %d\n%s" % (pro_run_id, traceback.format_exc()))

    def save_project_property(self, pro_run_id, properties):
        """Persist project properties linked to *pro_run_id*.

        properties: e.g. ``{'ITEM': 'ITEM_VALUE'}``.
        """
        rows = [(pro_run_id, item, DbBase.SeqData(value))
                for item, value in properties.items()]
        tab_obj = DbConfig.table_models[4]
        try:
            self.db.insert_many(tab_obj.name, tab_obj.columns, rows, True)
        except:
            logger.error(
                "Error while inserting project properties for project %d\n%s" % (pro_run_id, traceback.format_exc()))

    def get_output_table_name(self, pro_run_id, test_type_id, test_name):
        """Look up the output data table registered for (run, type, test);
        returns None (after logging an error) when no mapping exists."""
        tab_obj = DbConfig.table_models[6]
        cols = tab_obj.columns
        sql = "select %s from %s where %s=? and %s=? and %s=?" % (
            cols[3], tab_obj.name, cols[0], cols[1], cols[2],)
        row = self.db.query(sql, 1, (pro_run_id, test_type_id, test_name))
        if row:
            return row[0]
        logger.error("Error to get output data name: Cannot find a table map registration for test:%s" % test_name)

    def populate_for_data_save(self, post_data):
        """Placeholder hook for pre-persist shaping of *post_data*; currently a no-op."""
        pass

    def persist_data_original(self, location_id, location, data):
        """Persist *data* for the test named by ``location[-1]`` via the
        manual-save path.

        location_id: pkid of the location table.
        data: e.g. ``{'V_Gate': [1, 2, 3], 'I_Gate': [2, 3, 4]}``.
        """
        logger.debug("test_table_map when persist data:" + str(self.test_table_map) + "\n")
        test_name = location[-1].lower()
        logger.debug('test_name : ' + test_name)
        output_table_name = self.test_table_map.get(test_name, None)
        if not output_table_name:
            logger.error("Cannot get output table name when persist data to db!")
            return
        output_columns = self.data_table_model.get(output_table_name, None)
        logger.debug("output_columns in persit data to db:" + str(output_columns) + "\n")
        if not output_columns:
            logger.error("Cannot get output table columns model!")
            return
        param_columns = []
        for field in data.keys():
            if field not in output_columns:
                logger.error("No field in output data table match the test output item: %s" % field)
                continue
            param_columns.append(field)
        logger.debug("param_columns before save output data to db:" + str(param_columns) + "\n")
        try:
            self.__save_output_data_for_Manual(test_name, location_id, output_table_name, param_columns, data)
        except:
            logger.error("Error occured while saving output data for test %s in table %s:\n%s" % (
                test_name, output_table_name, traceback.format_exc()))

    def persist_data(self, location_id, location, data):
        """Persist *data* for the test named by ``location[-1]``.

        location_id: pkid of the location table.
        data: e.g. ``{'V_Gate': [1, 2, 3], 'I_Gate': [2, 3, 4]}``.
        Dispatches to the loop-customized saver when CUSTOMIZE_FOR_LOOP is on.
        """
        logger.debug("test_table_map when persist data:" + str(self.test_table_map) + "\n")
        test_name = location[-1].lower()
        logger.debug('test_name : ' + test_name)
        output_table_name = self.test_table_map.get(test_name, None)
        if not output_table_name:
            logger.error("Cannot get output table name when persist data to db!")
            return
        output_columns = self.data_table_model.get(output_table_name, None)
        logger.debug("output_columns in persit data to db:" + str(output_columns) + "\n")
        if not output_columns:
            logger.error("Cannot get output table columns model!")
            return
        param_columns = []
        for field in data.keys():
            if field not in output_columns:
                logger.error("No field in output data table match the test output item: %s" % field)
                continue
            param_columns.append(field)
        logger.debug("param_columns before save output data to db:" + str(param_columns) + "\n")
        try:
            if constant.CUSTOMIZE_FOR_LOOP == 1:
                self.__save_output_data_CL(test_name, location_id, output_table_name, output_columns, data)
            else:
                self.__save_output_data(test_name, location_id, output_table_name, param_columns, data)
        except:
            logger.error("Error occured while saving output data for test %s in table %s:\n%s" % (
                test_name, output_table_name, traceback.format_exc()))

    def __save_output_data_CL(self, test_name, loop_id, output_table_name, output_columns, populated_data):
        """Save one loop iteration's output row into *output_table_name*.

        populated_data: e.g. ``{'V_Gate': [1, 2, 3], 'I_Gate': [3, 4, 5]}``.
        In CUSTOMIZE_FOR_LOOP mode every call inserts a new (uncommitted)
        row; otherwise the row keyed by (run, loop, test) is inserted or
        updated.
        """
        logger.debug('populated_data: ' + str(populated_data))
        param = {'PROJECT_RUN_ID': self.prj_run_id, 'LOOP_ID': loop_id, 'TEST_NAME': test_name}
        import datetime
        param['TIMESTAMP'] = DbBase.DateTimeData(datetime.datetime.now())
        updates = {}
        for col in output_columns:
            output_values = populated_data.get(col, None)
            if not output_values:
                continue
            logger.debug('output_value in __save_output_data(): ' + str(output_values))
            param[col] = DbBase.SeqData(output_values)
            updates[col] = DbBase.SeqData(output_values)
        # BUG FIX: these debug calls passed the value as an extra positional
        # argument with no %-placeholder in the message, which breaks
        # %-style logger formatting; use explicit placeholders instead.
        logger.debug('output_table_name in __save_output_data(): %s', output_table_name)
        logger.debug('param in __save_output_data(): %s', param)
        conditions = {'PROJECT_RUN_ID': self.prj_run_id, 'LOOP_ID': loop_id, 'TEST_NAME': test_name}
        if constant.CUSTOMIZE_FOR_LOOP == 1:
            self.db.insert_row(output_table_name, commit_flag=False, **param)
        else:
            if not self.db.is_row_exist(output_table_name, conditions):
                self.db.insert_row(output_table_name, commit_flag=True, **param)
            else:
                logger.debug('updates in save_output_data(): %s', updates)
                self.db.update_tabl(output_table_name, conditions, updates)

    def __save_output_data_for_Manual(self, test_name, loop_id, output_table_name, output_columns, populated_data):
        """Insert-or-update one manual-mode output row keyed by (run, loop, test).

        populated_data: e.g. ``{'V_Gate': [1, 2, 3], 'I_Gate': [3, 4, 5]}``.
        """
        logger.debug('populated_data: ' + str(populated_data))
        param = {'PROJECT_RUN_ID': self.prj_run_id, 'LOOP_ID': loop_id, 'TEST_NAME': test_name}
        import datetime
        param['TIMESTAMP'] = DbBase.DateTimeData(datetime.datetime.now())
        updates = {}
        for col in output_columns:
            output_values = populated_data.get(col, None)
            if not output_values:
                continue
            logger.debug('output_value in __save_output_data(): ' + str(output_values))
            param[col] = DbBase.SeqData(output_values)
            updates[col] = DbBase.SeqData(output_values)
        # BUG FIX: these debug calls passed the value as an extra positional
        # argument with no %-placeholder in the message, which breaks
        # %-style logger formatting; use explicit placeholders instead.
        logger.debug('output_table_name in __save_output_data(): %s', output_table_name)
        logger.debug('param in __save_output_data(): %s', param)
        conditions = {'PROJECT_RUN_ID': self.prj_run_id, 'LOOP_ID': loop_id, 'TEST_NAME': test_name}
        if not self.db.is_row_exist(output_table_name, conditions):
            self.db.insert_row(output_table_name, commit_flag=True, **param)
        else:
            logger.debug('updates in save_output_data(): %s', updates)
            self.db.update_tabl(output_table_name, conditions, updates)

    def __save_output_data(self, test_name, location_id, output_table_name, output_columns, populated_data):
        """
        Insert or update one row of output data, keyed by
        (PROJECT_RUN_ID, GLOBAL_LOCATION_ID, TEST_NAME).

        populated_data:
            {'V_Gate':[1,2,3],'I_Gate':[3,4,5]}

        Columns absent from populated_data (or with empty values) are
        skipped; remaining values are wrapped in DbBase.SeqData.
        """
        import datetime

        logger.debug('populated_data: ' + str(populated_data))
        param = {'PROJECT_RUN_ID': self.prj_run_id, 'GLOBAL_LOCATION_ID': location_id, 'TEST_NAME': test_name}
        param['TIMESTAMP'] = DbBase.DateTimeData(datetime.datetime.now())
        updates = {}
        for col in output_columns:
            output_values = populated_data.get(col, None)
            # Skip columns with no data (None or empty sequence).
            if not output_values:
                continue
            logger.debug('output_value in __save_output_data(): ' + str(output_values))
            param[col] = DbBase.SeqData(output_values)
            updates[col] = DbBase.SeqData(output_values)
        # BUG FIX: extra positional args to logger.debug were dropped, and the
        # leftover print() debugging is now routed through the logger.
        logger.debug('output_table_name in __save_output_data(): ' + str(output_table_name))
        logger.debug('param in __save_output_data(): ' + str(param))
        conditions = {'PROJECT_RUN_ID': self.prj_run_id, 'GLOBAL_LOCATION_ID': location_id, 'TEST_NAME': test_name}
        if not self.db.is_row_exist(output_table_name, conditions):
            logger.debug('conditions in __save_output_data(): ' + str(conditions))
            self.db.insert_row(output_table_name, commit_flag=True, **param)
        else:
            logger.debug('updates in save_output_data(): ' + str(updates))
            self.db.update_tabl(output_table_name, conditions, updates)

    def __get_business_by(self, test_name):
        """Return the business pkid that *test_name* is registered under.

        Resolves TEST_REGISTER -> TEST_TYPE -> BUSINESS through the
        configured table models.

        Raises:
            LookupError: when no business mapping exists for the test.
                (Previously the code logged the error and then crashed on
                ``gl_result[0]`` with an opaque IndexError/TypeError.)
        """
        test_register_tab_name = DbConfig.table_models[8].name
        test_type_tab_name = DbConfig.table_models[1].name
        business_tab_name = DbConfig.table_models[0].name
        te_re_cols = DbConfig.table_models[8].columns
        te_ty_cols = DbConfig.table_models[1].columns
        bu_cols = DbConfig.table_models[0].columns
        gl_sql = "select bu.pkid from %s as t,%s as ty,%s as bu where t.%s=? and t.%s=ty.%s and ty.%s=bu.%s" \
                 % (test_register_tab_name, test_type_tab_name, business_tab_name,
                    te_re_cols[1], te_re_cols[0], te_ty_cols[0], te_ty_cols[1], bu_cols[0])
        gl_result = self.db.query(gl_sql, 1, (test_name,))
        if not gl_result:
            msg = "Cannot get corresponding business for test %s" % test_name
            logger.error(msg)
            raise LookupError(msg)
        return gl_result[0]

    def __get_output_list_by(self, output_tab_name):
        """
            Parse output_tab_name ({TestTypeName_OutputModelId}_OUTPUT) to
            extract OutputModelId, then fetch the output column list from the
            TEST_OUTPUT_MODEL table.

            Raises:
                LookupError: when the model id has no row in the model table.
        """
        # BUG FIX: split("_")[1] only works when TestTypeName itself contains
        # no underscore; per the documented name format the model id is always
        # the second-to-last "_"-separated component.
        output_model_id = int(output_tab_name.split("_")[-2])
        model_table = DbConfig.table_models[7].name
        model_cols = DbConfig.table_models[7].columns
        sql = "select %s from %s where pkid=?" % (model_cols[1], model_table)

        model_re = self.db.query(sql, 1, (output_model_id,))

        if not model_re:
            msg = "Cannot get output model id!"
            logger.error(msg)
            raise LookupError(msg)
        return model_re[0]

    def __get_global_location_list_by(self, business):
        """Return the location-list column of the BUSINESS row with pkid *business*."""
        table, columns = DbConfig.get_model_by(0)
        query = "select %s from %s where pkid=?" % (columns[1], table)
        rows = self.db.query(query, 1, (business,))
        if not rows:
            logger.error("Cannot get information for business %s" % business)
        return rows[0]

    def register_misc_item(self, name, value):
        """Upsert a (name, value) pair into the MISC table (model index 9).

        Inserts a new row when *name* is absent, otherwise updates the value
        column of the existing row.
        """
        tab_name, cols = DbConfig.get_model_by(9)
        para = {cols[0]: name}
        if not self.db.is_row_exist(tab_name, para):
            insert_pa = {cols[0]: name, cols[1]: value}
            # Typo fix in log messages: "inster" -> "insert".
            logger.debug("Before insert MISC flag")
            self.db.insert_row(tab_name, commit_flag=True, **insert_pa)
            logger.debug("After insert MISC flag")
        else:
            update_pa = {cols[1]: value}
            logger.debug("Before update MISC flag: " + str(value))
            self.db.update_tabl(tab_name, para, update_pa)
            logger.debug("After update MISC flag")
            logger.debug('tab_name:' + str(tab_name) + ' para: ' + str(para) + ' update_pa: ' + str(update_pa))

    def is_in_prj_running(self):
        """Return True when the MISC-table flag marks a project run as active."""
        tab_name, cols = DbConfig.get_model_by(9)
        flag_sql = "select %s from %s where %s=?" % (cols[1], tab_name, cols[0])
        flag_key = (DmConstant.ITEM_IN_RUNNING,)
        logger.debug("Before query MISC")
        row = self.db.query(flag_sql, 1, flag_key)
        logger.debug("After query MISC: " + str(row))
        return bool(row) and row[0] == DmConstant.VALUE_TRUE

    def fetch_output_data(self, prj_run_id=-1, target_test=None, location=None, real_time=True):
        """
        prj_run_id: if -1, fetch the output data of the latest project run,
                    otherwise of the given run.
        target_test: Name of test whose data is to be fetched (None = all).
        location: Query condition on location columns, e.g.
                {'test_name':'ptm3', 'site':'Target', 'wafer_id':1}.
                Keys must match the configured items of the 'BUSINESS'
                section of db_table.ini.
        real_time: whether the query runs during a test run
                (currently unused here; the in-running check is disabled).

        return: one dict per matching output row, e.g.
        [
            {
                'site': u'Target',
                'wafer_id': u'1',
                'test_name': u'ptm3',
                ...other location columns...,
                'data': {'V_Pos': [55, 5, 5], 'I_Pos': [6, 6, 6]}
            },
            ...
        ]
        """
        logger.debug("Enter fetch first")
        result = []
        # NOTE(review): the is_in_prj_running() gate on real_time was
        # deliberately disabled upstream; kept disabled here.
        if prj_run_id == -1:
            # Resolve to the most recent project run.
            sql = "select max(rowid) from %s" % DbConfig.table_models[2].name
            prj_id_result = self.db.query(sql, 1)
            if not prj_id_result:
                return result
            prj_run_id = prj_id_result[0]
        logger.debug("prj_run_id" + str(prj_run_id))

        # Map the run to its (type_id, test_name, output_table_name) rows.
        tab_name = DbConfig.table_models[6].name
        cols = DbConfig.table_models[6].columns
        query_seq = (cols[1], cols[2], cols[3])
        m = query_seq + (tab_name, cols[0])

        map_val = (prj_run_id,)
        sql = "select %s,%s,%s from %s where %s=?" % m
        if target_test:
            sql = sql + ' and %s=?' % cols[2]
            map_val = map_val + (target_test,)

        logger.debug("map_sql:" + sql + "  map_val: " + str(map_val))
        map_result = self.db.query(sql, -1, map_val)
        logger.debug("map_result:" + str(map_result))
        for test_table_map in map_result:
            test_name = test_table_map[1]
            table_name = test_table_map[2]

            # Build the select list: TEST_NAME + output model columns +
            # the business' global-location columns.
            output_seq = []
            model_list = self.__get_output_list_by(table_name)

            mo_alias = "mo"
            mod_sql = "%s.TEST_NAME," % mo_alias
            output_seq.append("TEST_NAME")
            for mod in model_list:
                mod_sql = mod_sql + "%s.%s," % (mo_alias, mod)
                output_seq.append(mod)

            busi_name = self.__get_business_by(test_name)
            gl_tab_name = "%s%d" % (DmConstant.GLOBAL_TABLE_NAME_PREFIX, busi_name)
            gl_alias = "gl"
            gl_sql = ""
            gl_loc_list = self.__get_global_location_list_by(busi_name)
            for loc in gl_loc_list:
                gl_sql = gl_sql + "%s.%s," % (gl_alias, loc)
                output_seq.append(loc)
            gl_sql = gl_sql.strip(",")
            final_sql = mod_sql
            if gl_sql.strip() == "":
                final_sql = final_sql.strip(",").strip()
            else:
                final_sql = final_sql + gl_sql

            sql = "select %s from %s as %s,%s as %s where %s.TEST_NAME=? and %s.PROJECT_RUN_ID=? and %s.GLOBAL_LOCATION_ID=%s.pkid" \
                  % (final_sql, table_name, mo_alias, gl_tab_name, gl_alias, mo_alias, mo_alias, mo_alias, gl_alias)
            valu = (test_name, prj_run_id)
            if location:
                for loc in location.keys():
                    sql = sql + " and %s.%s=?" % (gl_alias, loc)
                    valu = valu + (location[loc],)
            logger.debug("Final SQL:" + str(sql))
            logger.debug("value tuple:" + str(valu))
            output_query_resu = self.db.query(str(sql), -1, valu)

            # BUG FIX: a single dict used to be mutated and appended once per
            # output row, so 'result' held N references to the same dict (all
            # showing the last row's data). Build a fresh dict per row.
            # (The redundant debug-only "select *" query was also removed.)
            for k in output_query_resu:
                row_result = {"test_name": test_name}
                for location_v in gl_loc_list:
                    row_result[location_v] = k[output_seq.index(location_v)]
                data = {}
                for op in model_list:
                    data[op] = k[output_seq.index(op)]
                row_result["data"] = data
                result.append(row_result)
        return result

    def fetch_output_data_by_limit_CL(self,
                                      prj_run_ids=None,
                                      target_test=None,
                                      w_list=None,
                                      s_list=None,
                                      lo_list=None,
                                      real_time=False,
                                      append=False,
                                      qujian=None):
        """Fetch output data for the "CL" customized flow, grouped by LOOP_ID.

        Delegates to base_conditional_query_CL() and regroups its per-key
        result into {loop_id: {base_key: {output_item: value}}}.

        Returns:
            (loop_dict, loop_seq) -- loop_seq lists LOOP_ID values in the
            order they were first encountered.
        """
        if lo_list is None:
            lo_list = []
        if s_list is None:
            s_list = []
        if w_list is None:
            w_list = []
        if prj_run_ids is None:
            prj_run_ids = []
        base_result = self.base_conditional_query_CL(prj_run_ids=prj_run_ids, target_test=target_test,
                                                     wafer_list=w_list, site_list=s_list, loop_list=lo_list,
                                                     real_time=real_time, append=append, quj=qujian)
        loop_dict = {}
        loop_seq = []
        for base_key in base_result.keys():
            # item_values is expected to be (output_values, output_items,
            # all_items); shorter entries are ignored by the len() guard below.
            item_values = base_result.get(base_key)
            # NOTE(review): sub_dict is created once per base_key but mutated
            # for every o_value row, and the SAME dict object is stored under
            # every loop id -- later rows overwrite what earlier loop ids see.
            # Confirm whether this aliasing is intentional before changing it.
            sub_dict = {}
            logger.error("item_values:" + str(item_values))
            if len(item_values) > 2:
                if len(item_values[0]) == 0:
                    continue
                output_values = item_values[0]
                output_items = item_values[1]
                all_items = item_values[2]
                loop_v = 0
                for o_value in output_values:
                    # Rows without a LOOP_ID column (or too short to hold one)
                    # are skipped entirely.
                    if all_items.count("LOOP_ID") > 0:
                        ind = all_items.index("LOOP_ID")
                        if len(o_value) > ind:
                            loop_v = o_value[ind]
                            if loop_seq.count(o_value[ind]) == 0:
                                loop_seq.append(o_value[ind])

                            # Copy only as many columns as both the row and
                            # the output_items list actually contain.
                            loop_cnt = len(o_value)
                            if len(output_items) < loop_cnt:
                                loop_cnt = len(output_items)
                            for i in range(loop_cnt):
                                if all_items.count(output_items[i]) > 0:
                                    sub_dict[output_items[i]] = o_value[all_items.index(output_items[i])]
                            if loop_v not in loop_dict:
                                loop_dict[loop_v] = {base_key: sub_dict}
                            else:
                                loop_dict[loop_v].update({base_key: sub_dict})
        return loop_dict, loop_seq

    def fetch_output_data_by_limit(self,
                                   prj_run_ids=None,
                                   target_test=None,
                                   w_list=None,
                                   s_list=None,
                                   lo_list=None,
                                   real_time=False,
                                   append=False,
                                   qujian=None):
        """Fetch output data and regroup it as a nested mapping.

        Runs base_conditional_query() and reshapes its rows into
        {wafer: {site: {subsite: {device: {test: {output_item: value}}}}}}.
        Rows missing any grouping key are skipped.

        Returns:
            (final_result, site_seq, loop_seq) -- site_seq lists sites in
            first-seen order; loop_seq is currently always empty.
        """
        prj_run_ids = [] if prj_run_ids is None else prj_run_ids
        w_list = [] if w_list is None else w_list
        s_list = [] if s_list is None else s_list
        lo_list = [] if lo_list is None else lo_list
        base_result = self.base_conditional_query(prj_run_ids=prj_run_ids, target_test=target_test, wafer_list=w_list,
                                                  site_list=s_list, loop_list=lo_list, real_time=real_time,
                                                  append=append, quj=qujian)

        site_seq = []
        loop_seq = []
        final_result = {}
        for queried_test in base_result.keys():
            query_bundle = base_result.get(queried_test)
            row_results = query_bundle[0]
            model_list = query_bundle[1]
            output_seq = query_bundle[2]
            for row_tuple in row_results:
                wafer = row_tuple[output_seq.index('wafer_id')]
                if wafer is None:
                    continue
                wafer_node = final_result.get(wafer, {})

                site = row_tuple[output_seq.index('site')]
                if site is None:
                    continue
                if site not in site_seq:
                    site_seq.append(site)
                site_node = wafer_node.get(site, {})

                subsite = row_tuple[output_seq.index('subsite')]
                if subsite is None:
                    continue
                subsite_node = site_node.get(subsite, {})

                device = row_tuple[output_seq.index('device')]
                if device is None:
                    continue
                device_node = subsite_node.get(device, {})

                test = row_tuple[output_seq.index('test')]
                if test is None:
                    continue
                test_node = device_node.get(test, {})

                for op in model_list:
                    logger.debug("output_seq:" + str(output_seq))
                    logger.debug("op in loop:" + str(op))
                    op_ind = output_seq.index(op)
                    logger.debug("op_ind:" + str(op_ind))
                    logger.debug("row_tuple in query_resu:" + str(row_tuple))
                    test_node[op] = row_tuple[op_ind]

                # Write the nested nodes back bottom-up so partially skipped
                # rows never leave empty branches in final_result.
                device_node[test] = test_node
                subsite_node[device] = device_node
                site_node[subsite] = subsite_node
                wafer_node[site] = site_node
                final_result[wafer] = wafer_node
        return final_result, site_seq, loop_seq

    def query_prj_wafer_site_mapping(self,
                                     prj_run_ids=None,
                                     prj_name=None,
                                     target_test=None,
                                     location=None,
                                     real_time=True,
                                     append=False,
                                     limit_num=None):
        """Count queried site rows per (project_run_id, wafer_id).

        Returns:
            ({prj_id: {wafer_id: row_count}}, [wafer ids in first-seen order]),
            or the legacy sentinel 0 when the site-query SQL could not be
            generated (kept for existing callers).
        """
        if prj_run_ids is None:
            prj_run_ids = []
        logger.debug("Enter NO0 first : " + str(prj_run_ids))
        result = self.generate_site_query_sql(prj_run_ids, prj_name, target_test, location, real_time, append,
                                              limit_num)
        # (leftover print() debugging removed)
        if not result or len(result) < 2:
            return 0
        outer_sql = result[0]
        final_val = result[1]
        logger.debug("Site outer_sql : " + outer_sql)
        final_sql = "select PROJECT_RUN_ID,wafer_id,count(*) as NUM from (%s) group by PROJECT_RUN_ID,wafer_id" % outer_sql
        map_resu = self.db.query(final_sql, -1, final_val)
        result = {}
        wafer_list = []
        for map_ele in map_resu:
            prj_id = map_ele[0]
            wafer_dict = result.setdefault(prj_id, {})
            wafer_id = map_ele[1]
            wafer_dict[wafer_id] = map_ele[2]
            if wafer_id not in wafer_list:
                wafer_list.append(wafer_id)
        return result, wafer_list

    def query_209_misc_info(self, batch_n=None):
        """Group project runs of batch *batch_n* by MISC2/MISC3/MISC4.

        Returns:
            (misc_dict, tech_list, tech_dict, f_prj_list, group_list, prod_list)
            where misc_dict is {group: {prod: {tech: [prj_ids]}}}, tech_dict
            maps each tech to the largest project-list length seen for it, and
            the four lists are sorted de-duplicated key/ID lists.
        """
        # BUG FIX: batch_n used to be spliced into the SQL with %s (injection
        # prone, and the closing quote ran straight into "group by" with no
        # space). Use a bound parameter instead.
        sql = ("select pkid as prj_id, MISC2 as groupn,MISC3 as prod,MISC4 as tech_p "
               "from PROJECT_RUN_REGISTER where MISC1=? group by groupn,prod,tech_p,prj_id")
        ids_result = self.db.query(sql, -1, (batch_n,))
        misc_dict = {}
        tech_list = []
        tech_dict = {}
        f_prj_list = []
        group_list = []
        prod_list = []
        for ele in ids_result:
            group_n = ele[1]
            tmp_group_dict = misc_dict.get(group_n, {})
            if group_n not in group_list:
                group_list.append(group_n)

            prod_n = ele[2]
            tmp_prod_dict = tmp_group_dict.get(prod_n, {})
            if prod_n not in prod_list:
                prod_list.append(prod_n)

            tech_p = ele[3]
            if tech_p not in tech_list:
                tech_list.append(tech_p)
            prj_list = tmp_prod_dict.get(tech_p, [])

            prj_id = ele[0]
            if prj_id not in prj_list:
                prj_list.append(prj_id)
            if prj_id not in f_prj_list:
                f_prj_list.append(prj_id)

            tmp_prod_dict[tech_p] = prj_list
            # Track, per tech, the largest project list seen so far.
            if tech_p in tech_dict:
                if tech_dict.get(tech_p) < len(prj_list):
                    tech_dict[tech_p] = len(prj_list)
            else:
                tech_dict[tech_p] = len(prj_list)
            tmp_group_dict[prod_n] = tmp_prod_dict
            misc_dict[group_n] = tmp_group_dict
        tech_list.sort()
        f_prj_list.sort()
        group_list.sort()
        prod_list.sort()
        return misc_dict, tech_list, tech_dict, f_prj_list, group_list, prod_list

    def query_last_n_prjids(self, cnt):
        """Return the last *cnt* project run ids in ascending order.

        cnt values below 1 fall back to 5 (original behavior).
        """
        if cnt < 1:
            cnt = 5
        # BUG FIX: the limit used to be concatenated into the SQL string;
        # bind it as a parameter instead.
        sql = ("select prj_id from (select pkid as prj_id from PROJECT_RUN_REGISTER "
               "order by pkid desc limit ? offset 0) order by prj_id")
        ids_result = self.db.query(sql, -1, (cnt,))
        return [prj_id_ele[0] for prj_id_ele in ids_result]

    def base_query_table_map(self,
                             prj_run_ids=None,
                             prj_name=None,
                             target_test=None,
                             location=None,
                             real_time=True,
                             append=False,
                             batch_num=None):
        """Resolve which project runs to query and map them to output tables.

        When prj_run_ids is empty the run ids are resolved from the
        PROJECT_RUN_REGISTER table: the latest append chain (append=True),
        all runs of batch_num, or simply the latest run, optionally filtered
        by prj_name.

        Returns:
            (temp, ids_val, map_result) where temp is an SQL placeholder list
            "(?,?,...)" for the resolved run ids, ids_val the matching value
            tuple, and map_result the distinct (type_id, test_name,
            table_name) rows for those runs. Failure paths return
            (None, None, <empty>) so callers can always unpack three values.
        """
        if prj_run_ids is None:
            prj_run_ids = []

        logger.debug("Enter NO3 first : " + str(prj_run_ids))
        final_result = {}
        logger.debug("real_time before query MISC: " + str(real_time))
        if real_time and not self.is_in_prj_running():
            logger.debug("***Not in prj running")
            return None, None, final_result

        tab_name = DbConfig.table_models[2].name
        cols = DbConfig.table_models[2].columns

        if prj_run_ids == [] and append:
            # Append mode: find the newest non-append run, then include it
            # plus every run registered after it.
            sql = "select max(pkid) from %s where %s!=1 " % (tab_name, cols[2])
            if prj_name is None:
                result_2 = self.db.query(sql, 1)
            else:
                sql = sql + "and %s=?" % cols[0]
                val = (prj_name,)
                result_2 = self.db.query(sql, 1, val)

            # BUG FIX: an empty result used to crash on result_2[0]; fail the
            # same way the other error paths do.
            if not result_2:
                logger.error("Error: Query latest non-append run id failed! Query sql is %s." % sql)
                return None, None, final_result
            prj_id_nonappend = result_2[0]

            sql = "select pkid from %s where pkid>=? " % tab_name
            val = (prj_id_nonappend,)
            if prj_name is not None:
                sql = sql + "and %s=?" % cols[0]
                val = val + (prj_name,)
            ids_result = self.db.query(sql, -1, val)
            for ids in ids_result:
                prj_run_ids.append(ids[0])
            if len(ids_result) == 0:
                logger.error(
                    "Error: Query project ids_result (append is TRUE) is empty! \nError: Query  sql is %s.\nError: Query  param is:%s" % (
                        sql, str(val)))
        elif not prj_run_ids:
            # Non-append mode: either all runs of batch_num or the single
            # latest run, optionally restricted to prj_name.
            s_val = ()
            sql = "select max(pkid) from %s where 1=1" % DbConfig.table_models[2].name
            if batch_num is not None:
                sql = "select pkid from %s where 1=1 and %s=?" % (DbConfig.table_models[2].name, cols[3])
                s_val = s_val + (batch_num,)
            if prj_name is not None:
                sql = sql + " and %s=?" % cols[0]
                s_val = s_val + (prj_name,)
            if len(s_val) == 0:
                prj_id_result = self.db.query(sql, -1)
            else:
                prj_id_result = self.db.query(sql, -1, s_val)

            if not prj_id_result:
                logger.error(
                    "Error: Query prj_id_result is empty! \nError: Query  sql is %s.\nError: Query  param is:%s" % (
                        sql, str(s_val)))
                # BUG FIX: this path used to return the bare dict, which broke
                # every caller that unpacks (temp, ids_val, map_result).
                return None, None, final_result
            for id_re in prj_id_result:
                if len(id_re) == 0:
                    continue
                prj_run_ids.append(id_re[0])

        logger.debug("final prj_run_ids" + str(prj_run_ids))
        if prj_run_ids is None or len(prj_run_ids) == 0:
            logger.error("Error: Query prj_run_ids is empty! Return empty data directly!")
            return None, None, []
        tab_name = DbConfig.table_models[6].name
        cols = DbConfig.table_models[6].columns
        query_seq = (cols[1], cols[2], cols[3])
        m = query_seq + (tab_name, cols[0])

        # Build the "(?,?,...)" placeholder list and its value tuple.
        temp = '(' + '?,' * (len(prj_run_ids) - 1) + '?' + ')'
        m = m + (temp,)
        ids_val = tuple(prj_run_ids)
        map_val = ids_val
        sql = "select distinct %s,%s,%s from %s where %s in %s" % m
        if target_test:
            sql = sql + ' and %s=?' % cols[2]
            map_val = map_val + (target_test,)

        logger.debug("map_sql:" + sql + "  map_val: " + str(map_val))
        map_result = self.db.query(sql, -1, map_val)
        logger.debug("map_result:" + str(map_result))
        if len(map_result) == 0:
            logger.error(
                "Error: Query map_result is empty! \nQuery sql is: %s\nQuery param is:%s" % (sql, str(map_val)))
        return temp, ids_val, map_result

    def fetch_data_2nd(self, prj_run_ids=None, wafer_ids=None, site_ids=None, map_result=None):
        """Placeholder for a second-stage fetch.

        Currently only normalizes the list defaults and returns None; no
        query is performed yet.
        """
        prj_run_ids = [] if prj_run_ids is None else prj_run_ids
        wafer_ids = [] if wafer_ids is None else wafer_ids
        site_ids = [] if site_ids is None else site_ids

    def base_site_cnt_query(self,
                            prj_run_ids=None,
                            prj_name=None,
                            target_test=None,
                            location=None,
                            real_time=True,
                            append=False,
                            limit_num=None):
        """Count the rows the generated site query would return.

        Returns:
            The row count as an int, or 0 when the site-query SQL could not
            be generated or the count query yields nothing.
        """
        if prj_run_ids is None:
            prj_run_ids = []
        logger.debug("Enter NO0 first : " + str(prj_run_ids))
        result = self.generate_site_query_sql(prj_run_ids, prj_name, target_test, location, real_time, append,
                                              limit_num)
        # (leftover print() debugging removed throughout this method)
        if not result or len(result) < 2:
            return 0
        outer_sql = result[0]
        final_val = result[1]
        logger.debug("Site outer_sql : " + outer_sql)
        final_sql = "select count(*) from (%s)" % outer_sql
        base_site_cnt = self.db.query(final_sql, -1, final_val)
        logger.debug("base_site_cnt: " + str(base_site_cnt))
        if len(base_site_cnt) == 0 or len(base_site_cnt[0]) == 0:
            return 0
        return base_site_cnt[0][0]

    def base_loop_query_CL(self,
                           prj_run_ids=None,
                           prj_name=None,
                           target_test=None,
                           location=None,
                           real_time=True,
                           append=False,
                           start=None,
                           limit_num=None):
        """Run the CL loop query, optionally paginated with LIMIT/OFFSET.

        Returns the raw query rows, or an empty dict when the loop-query SQL
        could not be generated.
        """
        prj_run_ids = [] if prj_run_ids is None else prj_run_ids
        generated = self.generate_loop_query_sql_CL(prj_run_ids, prj_name, target_test, location, real_time, append,
                                                    limit_num)
        logger.debug('loop_resu: ' + str(generated))
        if not generated or len(generated) < 2:
            return {}
        sql_text = generated[0]
        sql_vals = generated[1]
        if start is None and limit_num is None:
            return self.db.query(sql_text, -1, sql_vals)
        paged_sql = "select * from (%s) limit ? offset ?" % sql_text
        return self.db.query(paged_sql, -1, sql_vals + (limit_num, start))

    def base_loop_query(self,
                        prj_run_ids=None,
                        prj_name=None,
                        target_test=None,
                        location=None,
                        real_time=True,
                        append=False,
                        start=None,
                        limit_num=None):
        """Run the loop query, picking the CL or standard SQL generator.

        The generator is chosen by constant.CUSTOMIZE_FOR_LOOP; results may
        be paginated via start/limit_num. Returns the raw query rows, or an
        empty dict when the SQL could not be generated.
        """
        prj_run_ids = [] if prj_run_ids is None else prj_run_ids
        if constant.CUSTOMIZE_FOR_LOOP == 1:
            generator = self.generate_loop_query_sql_CL
        else:
            generator = self.generate_loop_query_sql
        site_resu = generator(prj_run_ids, prj_name, target_test, location, real_time, append, limit_num)
        logger.debug('loop_resu: ' + str(site_resu))
        if not site_resu or len(site_resu) < 2:
            return {}
        outer_sql = site_resu[0]
        final_val = site_resu[1]
        if start is None and limit_num is None:
            return self.db.query(outer_sql, -1, final_val)
        paged_sql = "select * from (%s) limit ? offset ?" % outer_sql
        return self.db.query(paged_sql, -1, final_val + (limit_num, start))

    def generate_loop_query_sql_CL(self,
                                   prj_run_ids=None,
                                   prj_name=None,
                                   target_test=None,
                                   location=None,
                                   real_time=True,
                                   append=False,
                                   limit_num=None):
        """Build the SQL listing distinct (PROJECT_RUN_ID, LOOP_ID) pairs.

        Unions the rows of every output table mapped to the resolved project
        runs, orders them by TIMESTAMP, and wraps that in a distinct select.

        Returns:
            [outer_sql, final_val] on success, or [] when the table map could
            not be resolved.
        """
        if prj_run_ids is None:
            prj_run_ids = []
        temp, ids_val, map_result = self.base_query_table_map(prj_run_ids, prj_name, target_test, location, real_time,
                                                              append)
        # (leftover print() debugging removed)
        if len(map_result) == 0 or temp is None or ids_val is None:
            logger.debug("Debug flag2 ")
            return []
        final_val = ()
        outer_sql = "select distinct PROJECT_RUN_ID,LOOP_ID from (select PROJECT_RUN_ID,LOOP_ID,TIMESTAMP from ("
        for i, test_table_map in enumerate(map_result):
            table_name = test_table_map[2]

            sub_sql = "select A.PROJECT_RUN_ID,A.LOOP_ID,A.TIMESTAMP from %s A where A.PROJECT_RUN_ID in %s " % (
                table_name, temp)
            outer_sql = outer_sql + sub_sql
            # One copy of the run-id values per unioned table.
            final_val = final_val + ids_val
            if i < len(map_result) - 1:
                outer_sql = outer_sql + " union "
            else:
                # NOTE(review): the alias-qualified order-by on the last union
                # member is kept as-is; confirm the backend accepts it.
                outer_sql = outer_sql + " order by A.PROJECT_RUN_ID "
        outer_sql = outer_sql + ") order by TIMESTAMP)"
        # BUG FIX: logger.debug was given two positional arguments and the
        # second (the value tuple) was silently dropped.
        logger.debug("Query loop sql: " + outer_sql + " " + str(final_val))
        logger.debug("final_val: " + str(final_val))
        return [outer_sql, final_val]

    def generate_loop_query_sql(self,
                                prj_run_ids=None,
                                prj_name=None,
                                target_test=None,
                                location=None,
                                real_time=True,
                                append=False,
                                limit_num=None):
        """
        Build the SQL (and its bound-value tuple) that lists the distinct
        (PROJECT_RUN_ID, wafer_id, loop) triples across all output tables
        matching the given filters, ordered by TIMESTAMP. Each output table
        is joined to its global-location table on GLOBAL_LOCATION_ID.

        return: [outer_sql, final_val] on success, or [] when no table
                mapping can be resolved for the given filters.
        """
        if prj_run_ids is None:
            prj_run_ids = []
        temp, ids_val, map_result = self.base_query_table_map(prj_run_ids, prj_name, target_test, location, real_time,
                                                              append)

        logger.debug("Debug flag1 ")
        if len(map_result) == 0 or temp is None or ids_val is None:
            logger.debug("Debug flag2 ")
            return []

        logger.debug("Debug flag3 ")
        final_val = ()
        outer_sql = "select distinct PROJECT_RUN_ID,wafer_id,loop from (select PROJECT_RUN_ID,wafer_id,loop,TIMESTAMP from ("
        for i, test_table_map in enumerate(map_result):
            test_name = test_table_map[1]
            table_name = test_table_map[2]

            # Only the global-location table name is needed for the join; the
            # previous version also built a full column list (mod_sql /
            # gl_sql / output_seq, including extra DB lookups) that was never
            # used in the generated SQL.
            busi_name = self.__get_business_by(test_name)
            gl_tab_name = "%s%d" % (DmConstant.GLOBAL_TABLE_NAME_PREFIX, busi_name)

            sub_sql = "select B.wafer_id,B.loop,A.TIMESTAMP,A.PROJECT_RUN_ID from %s A,%s B where A.GLOBAL_LOCATION_ID=B.pkid and A.PROJECT_RUN_ID in %s " % (
                table_name, gl_tab_name, temp)
            outer_sql = outer_sql + sub_sql
            # The same run-id values repeat for every union branch.
            final_val = final_val + ids_val
            if i < len(map_result) - 1:
                outer_sql = outer_sql + " union "
            else:
                outer_sql = outer_sql + " order by A.PROJECT_RUN_ID "
        outer_sql = outer_sql + ") order by TIMESTAMP)"
        logger.debug("Query loop sql: " + outer_sql)
        logger.debug("final_val: " + str(final_val))
        return [outer_sql, final_val]

    def generate_site_query_sql(self,
                                prj_run_ids=None,
                                prj_name=None,
                                target_test=None,
                                location=None,
                                real_time=True,
                                append=False,
                                limit_num=None):
        """
        Build the SQL (and its bound-value tuple) that lists the distinct
        (PROJECT_RUN_ID, wafer_id, site) triples across all output tables
        matching the given filters, ordered by TIMESTAMP. Each output table
        is joined to its global-location table on GLOBAL_LOCATION_ID.

        return: [outer_sql, final_val] on success, or [] when no table
                mapping can be resolved for the given filters.
        """
        if prj_run_ids is None:
            prj_run_ids = []
        logger.debug("Enter NO2 first : " + str(prj_run_ids))
        temp, ids_val, map_result = self.base_query_table_map(prj_run_ids, prj_name, target_test, location, real_time,
                                                              append)

        logger.debug("Debug flag1 ")
        if len(map_result) == 0 or temp is None or ids_val is None:
            logger.debug("Debug flag2 ")
            return []

        logger.debug("Debug flag3 ")
        final_val = ()
        outer_sql = "select distinct PROJECT_RUN_ID,wafer_id,site from (select PROJECT_RUN_ID,wafer_id,site,TIMESTAMP from ("
        for i, test_table_map in enumerate(map_result):
            test_name = test_table_map[1]
            table_name = test_table_map[2]

            # Only the global-location table name is needed for the join; the
            # previous version also built a full column list (mod_sql /
            # gl_sql / output_seq, including extra DB lookups) that was never
            # used in the generated SQL, plus stray print() debug output.
            busi_name = self.__get_business_by(test_name)
            gl_tab_name = "%s%d" % (DmConstant.GLOBAL_TABLE_NAME_PREFIX, busi_name)

            sub_sql = "select B.wafer_id,B.site,A.TIMESTAMP,A.PROJECT_RUN_ID from %s as A,%s as B where A.GLOBAL_LOCATION_ID=B.pkid and A.PROJECT_RUN_ID in %s " % (
                table_name, gl_tab_name, temp)
            outer_sql = outer_sql + sub_sql
            # The same run-id values repeat for every union branch.
            final_val = final_val + ids_val
            if i < len(map_result) - 1:
                outer_sql = outer_sql + " union "
            else:
                outer_sql = outer_sql + " order by A.PROJECT_RUN_ID "
        outer_sql = outer_sql + ") order by TIMESTAMP)"
        logger.debug("Query site sql: " + outer_sql)
        logger.debug("final_val: " + str(final_val))
        return [outer_sql, final_val]

    def base_site_query(self,
                        prj_run_ids=None,
                        prj_name=None,
                        target_test=None,
                        location=None,
                        real_time=True,
                        append=False,
                        start=None,
                        limit_num=None):
        """
        Run the distinct-site query and return the raw db rows.

        start / limit_num: optional pagination, applied as OFFSET / LIMIT
                           around the generated site SQL.
        return: query rows, or {} when the site SQL could not be generated.
        """
        if prj_run_ids is None:
            prj_run_ids = []
        logger.debug("Enter NO1 first : " + str(prj_run_ids))
        base_site_result = {}
        site_resu = self.generate_site_query_sql(prj_run_ids, prj_name, target_test, location, real_time, append,
                                                 limit_num)
        logger.debug('site_resu: ' + str(site_resu))
        if not site_resu or len(site_resu) < 2:
            return base_site_result
        outer_sql = site_resu[0]
        final_val = site_resu[1]

        final_sql = outer_sql
        # BUGFIX: compare against None explicitly (as the loop variant of
        # this method does) so that start=0 / limit_num=0 still triggers the
        # paginated form instead of being treated as "no pagination".
        if start is not None or limit_num is not None:
            final_sql = "select * from (%s) limit ? offset ? " % outer_sql
            final_val = final_val + (limit_num, start)
        base_site_result = self.db.query(final_sql, -1, final_val)
        return base_site_result

    def base_conditional_query_CL(self,
                                  prj_run_ids=None,
                                  prj_name=None,
                                  target_test=None,
                                  location=None,
                                  wafer_list=None,
                                  site_list=None,
                                  loop_list=None,
                                  real_time=True,
                                  append=False,
                                  quj=None):
        """
        Conditional output query for the loop-customized (CL) schema, where
        LOOP_ID lives on the output table itself (no global-location join).

        prj_run_ids: run ids to query; [] means the latest run (or, with
                     append=True, every run since the last non-append run).
        prj_name:    optional project-name filter on the run lookup.
        target_test: restrict the table mapping to one test name.
        loop_list:   restrict to specific LOOP_ID values.
        quj:         optional (start, end) inclusive LOOP_ID range.
        real_time:   when True, return {} unless a project is running.
        location / wafer_list / site_list: accepted for interface parity with
                     base_conditional_query; not applied in the CL schema.

        return: {test_name: [rows, model_list, output_seq]}
        """
        if loop_list is None:
            loop_list = []
        if site_list is None:
            site_list = []
        if wafer_list is None:
            wafer_list = []
        if prj_run_ids is None:
            prj_run_ids = []
        logger.debug("Enter fetch first")
        final_result = {}
        logger.debug("real_time before qauery MISC: " + str(real_time))
        if real_time and not self.is_in_prj_running():
            logger.debug("***Not in prj running")
            return final_result

        tab_name = DbConfig.table_models[2].name
        cols = DbConfig.table_models[2].columns

        if prj_run_ids == [] and append:
            sql = "select max(pkid) from %s where %s!=1 " % (tab_name, cols[2])
            if prj_name is None:
                result_2 = self.db.query(sql, 1)
            else:
                sql = sql + "and %s=?" % cols[0]
                val = (prj_name,)
                result_2 = self.db.query(sql, 1, val)

            prj_id_nonappend = result_2[0]
            # BUGFIX: max(pkid) is NULL on an empty table; guard like
            # fetch_output_data_append does instead of querying pkid>=None.
            if prj_id_nonappend is not None:
                sql = "select pkid from %s where pkid>=? " % tab_name
                val = (prj_id_nonappend,)
                if prj_name is not None:
                    sql = sql + "and %s=?" % cols[0]
                    val = val + (prj_name,)
                ids_result = self.db.query(sql, -1, val)
                for ids in ids_result:
                    prj_run_ids.append(ids[0])
        elif not prj_run_ids:
            sql = "select max(rowid) from %s " % DbConfig.table_models[2].name
            if prj_name is not None:
                sql = sql + "where %s=?" % cols[0]
                s_val = (prj_name,)
                logger.debug("Before query max run id111")
                prj_id_result = self.db.query(sql, 1, s_val)
                logger.debug("After query max run id111")
            else:
                logger.debug("Before query max run id222")
                prj_id_result = self.db.query(sql, 1)
                logger.debug("After query max run id222: " + str(prj_id_result))
            if not prj_id_result:
                return final_result
            prj_run_ids = [prj_id_result[0]]

        logger.debug("final prj_run_ids" + str(prj_run_ids))

        tab_name = DbConfig.table_models[6].name
        cols = DbConfig.table_models[6].columns
        query_seq = (cols[1], cols[2], cols[3])
        m = query_seq + (tab_name, cols[0])

        map_val = ()
        # "(?,?,...)" placeholder list sized to prj_run_ids; "()" when empty,
        # so the placeholder count always matches ids_val.
        temp = '()'
        if prj_run_ids:
            temp = '(' + '?,' * (len(prj_run_ids) - 1) + '?' + ')'
        m = m + (temp,)
        ids_val = ()
        for tmp_v in prj_run_ids:
            ids_val = ids_val + (tmp_v,)
        map_val = map_val + ids_val
        sql = "select distinct %s,%s,%s from %s where %s in %s" % m
        if target_test:
            sql = sql + ' and %s=?' % cols[2]
            map_val = map_val + (target_test,)

        logger.debug("map_sql:" + sql + "  map_val: " + str(map_val))
        map_result = self.db.query(sql, -1, map_val)
        logger.debug("map_result:" + str(map_result))

        base_query_result = {}
        for test_table_map in map_result:
            test_name = test_table_map[1]
            table_name = test_table_map[2]

            # Build the select list and remember the column order so callers
            # can index rows by name via output_seq.
            output_seq = []
            model_list = self.__get_output_list_by(table_name)

            mo_alias = "mo"
            mod_sql = "%s.PROJECT_RUN_ID," % mo_alias
            output_seq.append("PROJECT_RUN_ID")
            mod_sql = mod_sql + "%s.TEST_NAME," % mo_alias
            output_seq.append("TEST_NAME")
            if constant.CUSTOMIZE_FOR_LOOP == 1:
                mod_sql = mod_sql + "%s.LOOP_ID," % mo_alias
                output_seq.append("LOOP_ID")
            for mod in model_list:
                mod_sql = mod_sql + "%s.%s," % (mo_alias, mod)
                output_seq.append(mod)

            final_sql = mod_sql.rstrip(',')

            sql = "select %s from %s as %s where %s.TEST_NAME=? and %s.PROJECT_RUN_ID in %s " \
                  % (final_sql, table_name, mo_alias, mo_alias, mo_alias, temp)
            valu = (test_name,) + ids_val

            if loop_list:
                loop_temp = '(' + '?,' * (len(loop_list) - 1) + '?' + ')'
                sql = sql + " and %s.LOOP_ID in %s" % (mo_alias, loop_temp)
                for loop_id in loop_list:
                    valu = valu + (loop_id,)

            if quj and len(quj) == 2:
                # Inclusive LOOP_ID range; %d forces integer interpolation.
                start = quj[0]
                end = quj[1]
                sql = sql + " and %s.LOOP_ID>=%d and %s.LOOP_ID<=%d " % (mo_alias, start, mo_alias, end)

            sql = sql + f" order by {mo_alias}.PROJECT_RUN_ID"
            logger.debug("Final SQL:" + str(sql))
            logger.debug("value tuple:" + str(valu))
            output_query_resu = self.db.query(str(sql), -1, valu)
            # BUGFIX: was logger.debug("...", str(...)) — a stray positional
            # argument with no %s placeholder in the message.
            logger.debug("output_query_resu: " + str(output_query_resu))
            base_query_result[test_name] = [output_query_resu, model_list, output_seq]
        return base_query_result

    def base_conditional_query(self,
                               prj_run_ids=None,
                               prj_name=None,
                               target_test=None,
                               location=None,
                               wafer_list=None,
                               site_list=None,
                               loop_list=None,
                               real_time=True,
                               append=False,
                               quj=None):
        """
        Conditional output query joined against each test's global-location
        table on GLOBAL_LOCATION_ID.

        prj_run_ids: run ids to query; [] means the latest run (or, with
                     append=True, every run since the last non-append run).
        prj_name:    optional project-name filter on the run lookup.
        target_test: restrict the table mapping to one test name.
        wafer_list / site_list / loop_list: restrict to given wafer ids /
                     sites / loop numbers (global-location columns).
        quj:         optional (start, end) inclusive loop range.
        location:    extra {column: value} equality filters on the
                     global-location table.
        real_time:   when True, return {} unless a project is running.

        return: {test_name: [rows, model_list, output_seq]}
        """
        if loop_list is None:
            loop_list = []
        if site_list is None:
            site_list = []
        if wafer_list is None:
            wafer_list = []
        if prj_run_ids is None:
            prj_run_ids = []
        logger.debug("Enter fetch first")
        final_result = {}
        logger.debug("real_time before qauery MISC: " + str(real_time))
        if real_time and not self.is_in_prj_running():
            logger.debug("***Not in prj running")
            return final_result

        tab_name = DbConfig.table_models[2].name
        cols = DbConfig.table_models[2].columns

        if prj_run_ids == [] and append:
            # BUGFIX: the f-string dropped the trailing space, so appending
            # "and %s=?" below produced invalid SQL like "...!=1and NAME=?".
            sql = f"select max(pkid) from {tab_name} where {cols[2]}!=1 "
            if prj_name is None:
                result_2 = self.db.query(sql, 1)
            else:
                sql = sql + "and %s=?" % cols[0]
                val = (prj_name,)
                result_2 = self.db.query(sql, 1, val)

            prj_id_nonappend = result_2[0]
            # BUGFIX: max(pkid) is NULL on an empty table; guard like
            # fetch_output_data_append does instead of querying pkid>=None.
            if prj_id_nonappend is not None:
                sql = "select pkid from %s where pkid>=? " % tab_name
                val = (prj_id_nonappend,)
                if prj_name is not None:
                    sql = sql + "and %s=?" % cols[0]
                    val = val + (prj_name,)
                ids_result = self.db.query(sql, -1, val)
                for ids in ids_result:
                    prj_run_ids.append(ids[0])
        elif not prj_run_ids:
            sql = "select max(rowid) from %s " % DbConfig.table_models[2].name
            if prj_name is not None:
                sql = sql + "where %s=?" % cols[0]
                s_val = (prj_name,)
                logger.debug("Before query max run id111")
                prj_id_result = self.db.query(sql, 1, s_val)
                logger.debug("After query max run id111")
            else:
                logger.debug("Before query max run id222")
                prj_id_result = self.db.query(sql, 1)
                logger.debug("After query max run id222: " + str(prj_id_result))
            if not prj_id_result:
                return final_result
            prj_run_ids = [prj_id_result[0]]

        logger.debug("final prj_run_ids" + str(prj_run_ids))

        tab_name = DbConfig.table_models[6].name
        cols = DbConfig.table_models[6].columns
        query_seq = (cols[1], cols[2], cols[3])
        m = query_seq + (tab_name, cols[0])

        map_val = ()
        # "(?,?,...)" placeholder list sized to prj_run_ids; "()" when empty,
        # so the placeholder count always matches ids_val.
        temp = '()'
        if prj_run_ids:
            temp = '(' + '?,' * (len(prj_run_ids) - 1) + '?' + ')'
        m = m + (temp,)
        ids_val = ()
        for tmp_v in prj_run_ids:
            ids_val = ids_val + (tmp_v,)
        map_val = map_val + ids_val
        sql = "select distinct %s,%s,%s from %s where %s in %s" % m
        if target_test:
            sql = sql + ' and %s=?' % cols[2]
            map_val = map_val + (target_test,)

        logger.debug("map_sql:" + sql + "  map_val: " + str(map_val))
        map_result = self.db.query(sql, -1, map_val)
        logger.debug("map_result:" + str(map_result))

        base_query_result = {}
        for test_table_map in map_result:
            test_name = test_table_map[1]
            table_name = test_table_map[2]

            # Build the select list and remember the column order so callers
            # can index rows by name via output_seq.
            output_seq = []
            model_list = self.__get_output_list_by(table_name)

            mo_alias = "mo"
            mod_sql = "%s.PROJECT_RUN_ID," % mo_alias
            output_seq.append("PROJECT_RUN_ID")
            mod_sql = mod_sql + "%s.TEST_NAME," % mo_alias
            output_seq.append("TEST_NAME")
            for mod in model_list:
                mod_sql = mod_sql + "%s.%s," % (mo_alias, mod)
                output_seq.append(mod)

            # Columns of the matching global-location table (wafer/site/...).
            busi_name = self.__get_business_by(test_name)
            gl_tab_name = "%s%d" % (DmConstant.GLOBAL_TABLE_NAME_PREFIX, busi_name)
            gl_alias = "gl"
            gl_sql = ""
            gl_loc_list = self.__get_global_location_list_by(busi_name)
            for loc in gl_loc_list:
                gl_sql = gl_sql + "%s.%s," % (gl_alias, loc)
                output_seq.append(loc)
            gl_sql = gl_sql.strip(",")
            final_sql = mod_sql
            if gl_sql.strip() == "":
                final_sql = final_sql.strip(",").strip()
            else:
                final_sql = final_sql + gl_sql

            sql = "select %s from %s as %s,%s as %s where %s.TEST_NAME=? and %s.PROJECT_RUN_ID in %s and %s.GLOBAL_LOCATION_ID=%s.pkid" \
                  % (
                      final_sql, table_name, mo_alias, gl_tab_name, gl_alias, mo_alias, mo_alias, temp, mo_alias,
                      gl_alias)
            valu = (test_name,) + ids_val

            if wafer_list:
                wafer_temp = '(' + '?,' * (len(wafer_list) - 1) + '?' + ')'
                sql = sql + " and %s.wafer_id in %s" % (gl_alias, wafer_temp)
                for w_id in wafer_list:
                    valu = valu + (w_id,)

            if site_list:
                site_temp = '(' + '?,' * (len(site_list) - 1) + '?' + ')'
                sql = sql + " and %s.site in %s" % (gl_alias, site_temp)
                for s_id in site_list:
                    valu = valu + (s_id,)

            if loop_list:
                loop_temp = '(' + '?,' * (len(loop_list) - 1) + '?' + ')'
                sql = sql + " and %s.loop in %s" % (gl_alias, loop_temp)
                for loop_id in loop_list:
                    valu = valu + (loop_id,)

            if quj and len(quj) == 2:
                # Inclusive loop range; %d forces integer interpolation.
                start = quj[0]
                end = quj[1]
                sql = sql + " and %s.loop>=%d and %s.loop<=%d " % (gl_alias, start, gl_alias, end)

            if location:
                for loc in location.keys():
                    sql = sql + " and %s.%s=?" % (gl_alias, loc)
                    valu = valu + (location[loc],)

            sql = sql + " order by %s.PROJECT_RUN_ID" % mo_alias
            logger.debug("Final SQL:" + str(sql))
            logger.debug("value tuple:" + str(valu))
            output_query_resu = self.db.query(str(sql), -1, valu)
            # BUGFIX: was logger.debug("...", str(...)) — a stray positional
            # argument with no %s placeholder in the message.
            logger.debug("output_query_resu: " + str(output_query_resu))
            base_query_result[test_name] = [output_query_resu, model_list, output_seq]
        return base_query_result

    def fetch_output_data_append(self,
                                 prj_run_ids=None,
                                 prj_name=None,
                                 target_test=None,
                                 location=None,
                                 real_time=True,
                                 append=False):
        """
        prj_run_ids: if prj_run_id is [], Fetch the output data corresponding to the latest project run.
                    Or corresponding to the specific project run with the prj_run_id.
        prj_name: name of the project to register
        target_test: Name of test whose data is to be fetched.
        location: Query condition of location. a dictionary with format: {location:value}
                e.g: {'test_name':'ptm3', 'site':'Target', 'wafer_id':1}
                The location is supposed to be the same str with the configured item in 'BUSINESS' section of db_table.ini.
        real_time: whether the query is during a test running
        append: whether fetch data for append supported

        return:
        {
            12:[
                {
                    'siteCor': u'(Tx,Ty)',
                    'site': u'Target',
                    'wafer_id': u'1',
                    'test': u'ptm3',
                    'test_name': u'ptm3',
                    'device': u'dv1',
                    'subsite': u'SS1',
                    'data': {
                        'V_Pos': [55, 5, 5, 5, 5, 5, 5],
                        'vth_T1_SS2': [7, 7, 7, 7, 7, 7, 7, 7, 7],
                        'I_Pos': [6, 6, 6, 6, 6, 6, 6, 6, 6, 6]
                        }
                },
                {
                    'siteCor': u'(Tx,Ty)',
                    'site': u'Target',
                    'wafer_id': u'1',
                    'test': u'ptm1',
                    'test_name': u'ptm1',
                    'device': u'dv1',
                    'subsite': u'SS1',
                    'data': {'V_Pos': [1, 2, 3, 4, 5, 4, 5, 5, 6, 7, 7], 'vth_T2_SS2': [1, 2, 3, 4, 5, 66, 4, 5, 5, 6, 7, 7], 'I_Pos': [1, 2, 3, 4, 33, 5, 4, 5, 5, 6, 7, 7]}
                }
               ],
            13:[{},{}],
            25:[{},{},{}]
        }
        """
        if prj_run_ids is None:
            prj_run_ids = []
        logger.debug("Enter fetch first")
        final_result = {}
        logger.debug("real_time before qauery MISC: " + str(real_time))
        # NOTE(review): the running-project guard is deliberately disabled in
        # this variant (still active in base_conditional_query).
        # if real_time and not self.is_in_prj_running():
        #     logger.debug("***Not in prj running")
        #     return final_result

        tab_name = DbConfig.table_models[2].name
        cols = DbConfig.table_models[2].columns
        if prj_run_ids == [] and append:
            sql = "select max(pkid) from %s where %s!=1 " % (tab_name, cols[2])
            if prj_name is None:
                result_2 = self.db.query(sql, 1)
            else:
                sql = sql + "and %s=?" % cols[0]
                val = (prj_name,)
                result_2 = self.db.query(sql, 1, val)

            prj_id_nonappend = result_2[0]
            if prj_id_nonappend is None:
                # max(pkid) is NULL on an empty table: nothing to fetch yet.
                prj_run_ids = []
            else:
                sql = "select pkid from %s where pkid>=? " % tab_name
                val = (prj_id_nonappend,)
                if prj_name is not None:
                    sql = sql + "and %s=?" % cols[0]
                    val = val + (prj_name,)
                ids_result = self.db.query(sql, -1, val)
                for ids in ids_result:
                    prj_run_ids.append(ids[0])
        elif not prj_run_ids:
            sql = "select max(rowid) from %s " % DbConfig.table_models[2].name
            if prj_name is not None:
                sql = sql + "where %s=?" % cols[0]
                s_val = (prj_name,)
                logger.debug("Before query max run id111")
                prj_id_result = self.db.query(sql, 1, s_val)
                logger.debug("After query max run id111")
            else:
                logger.debug("Before query max run id222")
                prj_id_result = self.db.query(sql, 1)
                logger.debug("After query max run id222: " + str(prj_id_result))

            if not prj_id_result:
                return final_result
            prj_run_ids = [prj_id_result[0]]

        logger.debug("final prj_run_ids" + str(prj_run_ids))

        tab_name = DbConfig.table_models[6].name
        cols = DbConfig.table_models[6].columns
        query_seq = (cols[1], cols[2], cols[3])
        m = query_seq + (tab_name, cols[0])

        map_val = ()
        # "(?,?,...)" placeholder list sized to prj_run_ids; "()" when empty.
        prj_len = len(prj_run_ids)
        temp = '()'
        if prj_len > 0:
            temp = '(' + '?,' * (len(prj_run_ids) - 1) + '?' + ')'
        m = m + (temp,)
        ids_val = ()
        for tmp_v in prj_run_ids:
            ids_val = ids_val + (tmp_v,)
        map_val = map_val + ids_val
        sql = "select distinct %s,%s,%s from %s where %s in %s" % m

        if target_test:
            sql = sql + ' and %s=?' % cols[2]
            map_val = map_val + (target_test,)

        logger.debug("map_sql:" + sql + "  map_val: " + str(map_val))
        map_result = self.db.query(sql, -1, map_val)
        logger.debug("map_result:" + str(map_result))
        for test_table_map in map_result:
            test_result = {}
            test_name = test_table_map[1]
            table_name = test_table_map[2]
            test_result["test_name"] = test_name

            # Build the select list and remember the column order so rows can
            # be indexed by name via output_seq.
            output_seq = []
            model_list = self.__get_output_list_by(table_name)

            mo_alias = "mo"
            mod_sql = "%s.PROJECT_RUN_ID," % mo_alias
            output_seq.append("PROJECT_RUN_ID")
            mod_sql = mod_sql + "%s.TEST_NAME," % mo_alias
            output_seq.append("TEST_NAME")
            for mod in model_list:
                mod_sql = mod_sql + "%s.%s," % (str(mo_alias), str(mod))
                output_seq.append(str(mod))

            busi_name = self.__get_business_by(test_name)
            gl_tab_name = "%s%d" % (DmConstant.GLOBAL_TABLE_NAME_PREFIX, busi_name)
            gl_alias = "gl"
            gl_sql = ""
            gl_loc_list = self.__get_global_location_list_by(busi_name)
            for loc in gl_loc_list:
                gl_sql = gl_sql + "%s.%s," % (gl_alias, loc)
                if constant.CUSTOMIZE_FOR_LOOP == 0:
                    output_seq.append(loc)
            gl_sql = gl_sql.strip(",")
            final_sql = mod_sql
            if constant.CUSTOMIZE_FOR_LOOP == 1:
                # CL schema: no global-location columns in the select list.
                final_sql = final_sql.strip(",").strip()
            else:
                if gl_sql.strip() == "":
                    final_sql = final_sql.strip(",").strip()
                else:
                    final_sql = final_sql + gl_sql

            if constant.CUSTOMIZE_FOR_LOOP == 1:
                sql = "select %s from %s as %s where %s.TEST_NAME=? and %s.PROJECT_RUN_ID in %s" \
                      % (final_sql, table_name, mo_alias, mo_alias, mo_alias, temp)
                valu = (test_name,) + ids_val
            else:
                sql = "select %s from %s as %s,%s as %s where %s.TEST_NAME=? and %s.PROJECT_RUN_ID in %s and %s.GLOBAL_LOCATION_ID=%s.pkid" \
                      % (final_sql, table_name, mo_alias, gl_tab_name, gl_alias, mo_alias, mo_alias, temp, mo_alias,
                         gl_alias)
                valu = (test_name,) + ids_val
                if location:
                    for loc in location.keys():
                        sql = sql + " and %s.%s=?" % (gl_alias, loc)
                        valu = valu + (location[loc],)
            sql = sql + " order by %s.PROJECT_RUN_ID" % mo_alias
            logger.debug("Final SQL:" + str(sql))
            logger.debug("value tuple:" + str(valu))
            output_query_resu = self.db.query(sql, -1, valu)
            # BUGFIX: was logger.debug("...", str(...)) — a stray positional
            # argument with no %s placeholder in the message.
            logger.debug("output_query_resu: " + str(output_query_resu))
            for k in output_query_resu:
                if constant.CUSTOMIZE_FOR_LOOP == 0:
                    for location_v in gl_loc_list:
                        ind = output_seq.index(location_v)
                        test_result[location_v] = k[ind]
                data = {}
                for op in model_list:
                    logger.debug("output_seq:" + str(output_seq))
                    logger.debug("op in loop:" + str(op))
                    op_ind = output_seq.index(op)
                    logger.debug("op_ind:" + str(op_ind))
                    logger.debug("k in query_resu:" + str(k))
                    data[op] = k[op_ind]
                test_result["data"] = data
                tmp_prj_run_id = k[0]
                # Group rows by project run id; deepcopy so later rows do not
                # mutate entries already stored in final_result.
                if tmp_prj_run_id in final_result:
                    data_list = final_result.get(tmp_prj_run_id)
                    data_list.append(copy.deepcopy(test_result))
                else:
                    final_result[tmp_prj_run_id] = [copy.deepcopy(test_result)]
        # BUGFIX: same stray-positional-argument logger call as above.
        logger.debug('Final result in DbImple: ' + str(final_result))
        return final_result

    def update_report_value(self, device, loop, value_dict, real_ti=True, commit=True):
        """
        Update report columns on already-recorded result rows.

        Supposed to work with only one wafer, one site, multiple devices.

        device: device identifier used to resolve the GLOBAL_LOCATION row
                (ignored when constant.CUSTOMIZE_FOR_LOOP == 1).
        loop: loop id selecting the row(s) to update.
        value_dict: {item1: value, item2: value2}
            <<< itemX must be same to the output item in sspf. >>>
            e.g.  {'bin_sort': 'F1'}
        real_ti: forwarded to base_query_table_map as real_time.
        commit: when True, commit after all tables have been updated.
        """
        temp, ids_val, map_result = self.base_query_table_map(prj_run_ids=[], real_time=real_ti)

        if len(map_result) == 0 or temp is None or ids_val is None:
            logger.error("update_report_value Exception:  Table map result is empty! ")
            return

        # Build the "col1=?,col2=?" SET fragment and the matching value tuple.
        # Values are wrapped in DbBase.SeqData as required by the DB layer.
        sub_sql = ""
        val_v = ()
        for keyv in value_dict.keys():
            sub_sql = sub_sql + keyv + "=?,"
            val_v = val_v + (DbBase.SeqData([value_dict.get(keyv)]),)
        sub_sql = sub_sql.rstrip(',')
        val_v = val_v + ids_val
        if constant.CUSTOMIZE_FOR_LOOP == 1:
            # Loop mode: rows are keyed directly by LOOP_ID.
            val_v = val_v + (loop,)
        else:
            # Location mode: rows are keyed via the GLOBAL_LOCATION sub-select.
            val_v = val_v + (device, loop)

        # Apply the same update to every mapped result table.
        # (Removed dead locals from the original: val_name, table_name, test_na.)
        for ele in map_result:
            tab_na = ele[2]
            if constant.CUSTOMIZE_FOR_LOOP == 1:
                sql = "update %s set %s where PROJECT_RUN_ID in %s and LOOP_ID=?" % (tab_na, sub_sql, temp)
            else:
                sql = "update %s set %s where PROJECT_RUN_ID in %s and GLOBAL_LOCATION_ID in (select pkid from GLOBAL_LOCATION_1 where device=? and loop=?)" % (
                    tab_na, sub_sql, temp)

            self.db.cur.execute(sql, val_v)

        if commit:
            self.db.con.commit()

    def query_items_fail(self,
                         prj_run_ids=None,
                         prj_name=None,
                         target_test=None,
                         location=None,
                         real_time=True,
                         append=False,
                         limit_num=None,
                         items_for_fail_report=None):
        """
        Count distinct values of selected report items across result tables.

        items_for_fail_report: {test_name: [item, ...]} — items to group by,
            per test. Returns [] / {} when empty or when the table map is empty.
        Returns the raw rows of the UNION-ed group-by query:
            ('<item>', <value>, <count>) tuples.
        """
        if items_for_fail_report is None:
            items_for_fail_report = {}
        if prj_run_ids is None:
            prj_run_ids = []
        result = {}
        if len(items_for_fail_report) == 0:
            logger.error("query_items_fail Exception:  items_for_fail_report result is empty! ")
            return result

        temp, ids_val, map_result = self.base_query_table_map(prj_run_ids, prj_name, target_test, location, real_time,
                                                              append)

        if len(map_result) == 0 or temp is None or ids_val is None:
            logger.error("query_items_fail Exception:  Table map result is empty! ")
            return result

        # One SELECT ... GROUP BY per (table, item), glued with UNION.
        out_sql = ''
        out_val = ()
        for ele in map_result:
            tab_na = ele[2]
            test_na = ele[1]
            i__for_fail_report = items_for_fail_report.get(test_na, [])
            for item in i__for_fail_report:
                out_sql = out_sql + "select '%s' as name,A.%s as item,count(*) from %s as A where A.PROJECT_RUN_ID in %s group by A.%s UNION " % (
                    item, item, tab_na, temp, item)
                out_val = out_val + ids_val

        # BUG FIX: the original used out_sql.rstrip('UNION '), which strips a
        # trailing run of the CHARACTERS {U,N,I,O,space} — if the last grouped
        # column name ends in any of those letters (e.g. "PIN"), part of the
        # column name was eaten and the SQL broke. Remove the exact separator.
        if out_sql.endswith(" UNION "):
            out_sql = out_sql[:-len(" UNION ")]

        result = self.db.query(out_sql, -1, out_val)
        return result

    def query_last_loop(self,
                        prj_run_ids=None,
                        prj_name=None,
                        target_test=None,
                        location=None,
                        real_time=True,
                        append=False,
                        limit_num=None,
                        batch_n=None):
        """
        Return the highest LOOP_ID recorded across all mapped result tables.

        Returns 0 when the table map is empty or no table holds any loop rows.
        """
        if prj_run_ids is None:
            prj_run_ids = []
        temp, ids_val, map_result = self.base_query_table_map(prj_run_ids, prj_name, target_test, location, real_time,
                                                              append, batch_num=batch_n)
        max_loop = 0
        if len(map_result) == 0 or temp is None or ids_val is None:
            logger.error("query_last_loop Exception:  Table map result is empty! ")
            return max_loop
        for ele in map_result:
            tab_na = ele[2]
            sql = "select max(LOOP_ID) from %s where PROJECT_RUN_ID in %s" % (tab_na, temp)
            result = self.db.query(sql, -1, ids_val)
            # BUG FIX: the original guard was a no-op `pass` followed by an
            # unconditional result[0][0] (IndexError on empty results) — skip
            # tables that produced no rows instead.
            if len(result) == 0 or len(result[0]) == 0:
                continue
            loop_id = result[0][0]
            # BUG FIX: max() over zero rows yields NULL/None, and `None > 0`
            # raises TypeError on Python 3 — treat None as "no loops".
            if loop_id is not None and loop_id > max_loop:
                max_loop = loop_id
        return max_loop

    def query_binsort_result(self,
                             prj_run_ids=None,
                             prj_name=None,
                             target_test=None,
                             location=None,
                             real_time=True,
                             append=False,
                             limit_num=None,
                             batch_n=None):
        """
        Count devices per bin_sort value over all mapped result tables.

        Builds a UNION of per-table (bin_sort, loop) selects, de-duplicates
        per loop iteration, then groups by bin_sort.
        Returns the raw (bin_sort, count) rows, or {} when the table map is
        empty.
        """
        if prj_run_ids is None:
            prj_run_ids = []
        temp, ids_val, map_result = self.base_query_table_map(prj_run_ids, prj_name, target_test, location, real_time,
                                                              append, batch_num=batch_n)

        result = {}
        if len(map_result) == 0 or temp is None or ids_val is None:
            logger.error("query_binsort_result Exception:  Table map result is empty! ")
            return result

        table_name = map_result[0][2]
        # Inner sub-query: one SELECT per mapped table, combined with Union.
        out_vals = ()
        sub_sql = "("
        for i, ele in enumerate(map_result):
            tab_na = ele[2]
            if constant.CUSTOMIZE_FOR_LOOP == 1:
                # Loop mode: LOOP_ID lives directly on the result table.
                sub_sql = sub_sql + "select bin_sort,LOOP_ID as loop from %s where PROJECT_RUN_ID in %s" % (
                    tab_na, temp)
            else:
                # Location mode: join GLOBAL_LOCATION_1 to recover the loop.
                sub_sql = sub_sql + "select A1.bin_sort,B1.loop from %s as A1,GLOBAL_LOCATION_1 as B1 where A1.GLOBAL_LOCATION_ID=B1.pkid and A1.PROJECT_RUN_ID in %s" % (
                    tab_na, temp)
            # Each SELECT consumes its own copy of the id placeholders.
            out_vals = out_vals + ids_val
            if i != len(map_result) - 1:
                sub_sql = sub_sql + " Union "
        sub_sql = sub_sql + ")"

        out_sql = "select bin_sort,count(*) from (select distinct bin_sort,loop from %s) group by bin_sort" % sub_sql
        result = self.db.query(out_sql, -1, out_vals)
        return result

    def register_loop_location(self, loopId, *loc):
        """
        Register (or look up) a global-location row for one loop iteration.

        loc = wafer, siteID, siteCoord, ssname, dev, tst_name
        loopId: loop id stored as the final location column.
        Returns the pkid of the existing or newly inserted row, or None when
        the insert/lookup fails.
        """
        paras = {}
        busi = DbConfig.businesses[0]
        cols = DbConfig.businesses_model[busi][0]
        logger.debug("cols: %s" % str(cols))
        logger.debug("loc: %s" % str(loc))
        # The loop id is appended as the last location column.
        new_loc = loc + (loopId,)
        for i, col in enumerate(cols):
            paras[col] = str(new_loc[i])
        # Read-only access to module-level BUSINESS_IDS; no `global` needed.
        logger.debug("Before get business: %s" % str(BUSINESS_IDS))
        pkid = BUSINESS_IDS[busi]
        table_name = "%s%d" % (DmConstant.GLOBAL_TABLE_NAME_PREFIX, pkid)
        try:
            resu, loc_pkid = self.db.is_row_exist_pkid(table_name, paras)
            if not resu:
                loc_pkid = self.db.insert_row(table_name, commit_flag=True, **paras)
            return loc_pkid
        except Exception:
            # FIX: narrowed from bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; DB errors are still logged and absorbed.
            logger.error("Error while inserting global location into table:\n%s" % traceback.format_exc())
            return None


if __name__ == '__main__':
    # Ad-hoc manual smoke test against a local database.
    # FIX: removed the redundant local `import sys` — sys is already imported
    # at the top of this module.
    db_file = "D:/aaas.db"

    sys.path.insert(0, r"D:\workspace\tempSpace1\MRBIG_Cust\ATS")
    dbimpl = DbImpl(db_file)

    b = dbimpl.query_last_loop(real_time=False, batch_n="STDFTest4")
    a = dbimpl.query_binsort_result(real_time=False, batch_n="STDFTest4")