from datetime import datetime
import json
from sqlalchemy import text
from sqlalchemy.exc import PendingRollbackError


def str2Array(string_, spilt_str=","):
    """Split *string_* on *spilt_str* and return the non-empty parts.

    Args:
        string_: String to split; ``None`` or ``""`` yields ``[]``.
        spilt_str: Separator, default ``","``. (Name misspelled historically;
            kept for backward compatibility with keyword callers.)

    Returns:
        list[str]: Non-empty substrings in their original order.
    """
    # `string_ and len(string_) > 0` was redundant: plain truthiness covers
    # both None and the empty string.
    if not string_:
        return []
    return [part for part in string_.split(spilt_str) if part]


def data_integrity(request,session) -> str:
    """Aggregate file-completeness ("data integrity") statistics.

    The request JSON must contain ``start_date``, ``end_date`` and the
    comma-separated filter strings ``ota_versions``, ``vehicles`` and
    ``triggerId``.  Four aggregate queries are run against
    ``files_completeness`` (joined to ``vehicle_info_mngt`` /
    ``trigger_id_info_mngt``) and the result is returned as a JSON string
    with daily missing rates, per-vehicle daily rates, the vehicle list and
    daily per-vehicle file sizes.

    NOTE(review): every filter value is interpolated into the SQL via
    f-strings — SQL injection risk; should use bound parameters
    (SQLAlchemy ``text().bindparams`` with expanding IN parameters).
    """
    json_params = request.json
    start_date = json_params["start_date"]
    end_date = json_params["end_date"]
    # Comma-separated filter lists; empty entries are dropped below.
    otas = json_params["ota_versions"].split(",")
    vins = json_params["vehicles"].split(",")
    trigger_ids = json_params["triggerId"].split(",")
    triggerIdStr = ""
    tid_list = []
    for tid in trigger_ids:
        if len(tid) > 0:
            tid_list.append(tid)
    if len(tid_list) > 0:
        # Optional trigger-id IN-filter appended verbatim to each query.
        triggerIdStr = f" and  fc.trigger_id in ('{"','".join(tid_list)}')"

    otasStr = ""
    otasStrArray = []
    for ota in otas:
        if len(ota) > 0:
            otasStrArray.append(ota)
    if len(otasStrArray) > 0:
        otasStr = f" and  fc.ota_version in ('{"','".join(otasStrArray)}')"
    # Does the selection include the unknown version alongside others,
    # or consist of only the unknown version?
    hasUnkownOTA = "未知版本" in otasStrArray and len(otasStrArray) > 1
    onlyUnkownOTA = "未知版本" in otasStrArray and len(otasStrArray) == 1
    # Candidate predicates for unknown-only / mixed / known-only OTA filtering.
    OTA_DISN = ["(fc.ota_version='' or fc.ota_version='unknown')",
                "((fc.ota_version_type >= 'OTA5' and fc.ota_version_type like '%OTA%' and fc.ota_win_type >= 'WIN2.0') or (fc.ota_version='' or fc.ota_version='unknown'))",
                "(fc.ota_version_type >= 'OTA5' and fc.ota_version_type like '%OTA%' and fc.ota_win_type >= 'WIN2.0')"]
    OTA_DISN_STR = OTA_DISN[2]
    if onlyUnkownOTA:
        OTA_DISN_STR = OTA_DISN[0]
    if hasUnkownOTA:
        OTA_DISN_STR = OTA_DISN[1]
    # NOTE(review): OTA_DISN_STR is computed but never used in the SQL
    # below — apparently dead code; confirm before removing.
    if onlyUnkownOTA or hasUnkownOTA:
        # The plain IN-filter cannot match the "unknown" pseudo-version,
        # so it is dropped entirely in that case.
        otasStr = ""
    vins_r = []
    vinStr = ""
    for vin in vins:
        if len(vin) > 0:
            vins_r.append(vin)
    if len(vins_r) > 0:
        vinStr = f" and fc.uuid in ('{"','".join(vins_r)}')"
    # Response payload skeleton.
    result = {
        "code": 200,
        "message": "数据完整性",
        "data": {
            "daily_rate": [],
            "daily_vehicle_rate": [],
            "vehicle": [],
            "file_size": []
        }
    }
    try:
        # Query the per-day missing rate for each file category directly.
        sql_rate_daily = f"""
                               select bd.date_temp
								   ,round(sum(bd.missing_bytesoup)/sum(bd.need_bytesoup)*100,1) missing_bytesoup_rate
								   ,round(sum(bd.missing_log)/sum(bd.need_log)*100,1) missing_log_rate
								   ,round(sum(bd.missing_json)/sum(bd.need_json)*100,1) missing_json_rate
								   ,round(sum(bd.missing_mbag)/sum(bd.need_mbag)*100,1) missing_mbag_rate
							  from (select  date_temp
										   ,if(LOCATE('bytesoup',fc.missing_categories) > 0,1,0) as missing_bytesoup
										   ,if(LOCATE('log',fc.missing_categories) > 0,1,0) as missing_log
										   ,if(LOCATE('mbag',fc.missing_categories) > 0,1,0) as missing_mbag
										   ,if(LOCATE('json',fc.missing_categories) > 0,1,0) as missing_json
										   ,if(LOCATE('bs',fc.need_files) > 0,1,0) as need_bytesoup
										   ,if(LOCATE('log',fc.need_files) > 0,1,0) as need_log
										   ,if(LOCATE('mbag',fc.need_files) > 0,1,0) as need_mbag
										   ,if(LOCATE('json',fc.need_files) > 0,1,0) as need_json
									  from (select DATE_FORMAT(FROM_UNIXTIME(data_ts/1000),'%Y-%m-%d')  as date_temp
												   ,missing_categories
												   ,need_files
												   ,tid as trigger_id
												   ,right(t.uuid,12) as uuid
												   ,ota_version
											  from files_completeness t
										   )fc
									 where fc.date_temp >= '{start_date}' 
									   and fc.date_temp <= '{end_date}' {vinStr} {triggerIdStr} {otasStr} 	
								   ) bd
							 group by bd.date_temp
							 order by bd.date_temp;
                            """
        # Per-vehicle daily rates plus total file size; ignored trigger ids
        # (trigger_id_info_mngt.is_ignore = 1) are excluded here.
        sql_rate_daily_vin = f"""
                        select bd.date_temp
								   ,min(bd.user_name) as user_name
								   ,min(bd.class_name) as class_name
								   ,min(bd.vin)
								   ,bd.uuid
								   ,min(bd.ota_version) as ota_version
								   ,round((sum(bd.missing_bytesoup)+sum(bd.missing_log)+sum(bd.missing_json)+sum(bd.missing_mbag))
								   /
								   (sum(bd.need_bytesoup)+sum(bd.need_log)+sum(bd.need_json)+sum(bd.need_mbag))
								   *100,1) as total_missing_rate
								   ,round(sum(bd.file_size)/1024/1024,1) as total_file_size_MB
								   ,round(sum(bd.missing_bytesoup)/sum(bd.need_bytesoup)*100,1) missing_bytesoup_rate
								   ,round(sum(bd.missing_log)/sum(bd.need_log)*100,1) missing_log_rate
								   ,round(sum(bd.missing_json)/sum(bd.need_json)*100,1) missing_json_rate
								   ,round(sum(bd.missing_mbag)/sum(bd.need_mbag)*100,1) missing_mbag_rate
							  from (
								   select  fc.date_temp
										   ,right(fc.uuid,12) as uuid
										   ,fc.ota_version
										   ,fc.package_size as file_size
										   ,if(ISNULL(vim.int_vehicle_no),right(fc.uuid,12),vim.int_vehicle_no) as user_name
										   ,if(ISNULL(vim.vehicle_tested_by),'其他',vim.vehicle_tested_by) as class_name
										   ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.vin) as vin
										   ,if(LOCATE('bytesoup',fc.missing_categories) > 0,1,0) as missing_bytesoup
										   ,if(LOCATE('log',fc.missing_categories) > 0,1,0) as missing_log
										   ,if(LOCATE('mbag',fc.missing_categories) > 0,1,0) as missing_mbag
										   ,if(LOCATE('json',fc.missing_categories) > 0,1,0) as missing_json
										   ,if(LOCATE('bs',fc.need_files) > 0,1,0) as need_bytesoup
										   ,if(LOCATE('log',fc.need_files) > 0,1,0) as need_log
										   ,if(LOCATE('mbag',fc.need_files) > 0,1,0) as need_mbag
										   ,if(LOCATE('json',fc.need_files) > 0,1,0) as need_json
									  from (select DATE_FORMAT(FROM_UNIXTIME(data_ts/1000),'%Y-%m-%d')  as date_temp
												   ,missing_categories
												   ,need_files
												   ,tid as trigger_id
												   ,right(t.uuid,12) as uuid
												   ,uuid as uuid_full
												   ,ota_version
												   ,package_size
											  from files_completeness t
											 where EXISTS (select 1
															 from trigger_id_info_mngt t1
															where t.tid = t1.trigger_id
															  and t1.is_ignore = 0
														  )
										   )fc
									  left join 
										   (select vin
												   ,chery_no
												   ,int_vehicle_no
												   ,vehicle_tested_by
											  from vehicle_info_mngt 
										   ) vim
										on fc.uuid_full = vim.chery_no
									 where fc.date_temp >= '{start_date}' 
									   and fc.date_temp <= '{end_date}' {vinStr} {triggerIdStr} {otasStr} 
								   ) bd 
							 group by bd.date_temp,bd.uuid
							 order by bd.date_temp	;   
                    """
        # Distinct vehicles seen in the date range (for filter dropdowns).
        sql_vin = f"""
                    select min(bd.user_name) as user_name
								   ,min(bd.class_name) as class_name
								   ,bd.vin
								   ,bd.uuid
							  from (select right(fc.uuid,12) as uuid
										   ,if(ISNULL(vim.int_vehicle_no),fc.uuid,vim.int_vehicle_no) as user_name
										   ,if(ISNULL(vim.vehicle_tested_by),'其他',vim.vehicle_tested_by) as class_name
										   ,if(ISNULL(vim.vin),fc.uuid,vim.vin) as vin
									  from (select  right(uuid,12) as uuid
									                ,uuid as uuid_full
                                                   ,tid as trigger_id
												   ,ota_version
												   ,DATE_FORMAT(FROM_UNIXTIME(data_ts/1000),'%Y-%m-%d')  as date_temp
											  from files_completeness 
										   ) fc
									  left join
										   (select vin
												   ,chery_no
												   ,int_vehicle_no
												   ,vehicle_tested_by
											  from vehicle_info_mngt
										   ) vim
										on fc.uuid_full = vim.chery_no
									 where fc.date_temp >= '{start_date}' 
									   and fc.date_temp <= '{end_date}' {vinStr} {triggerIdStr} {otasStr} 	
								   ) bd
							 group by bd.vin,bd.uuid;
                   """
        # Daily uploaded file size per vehicle (MB); 0 when a vehicle has
        # no matching trigger rows on that day.
        sql_daily_file_size = f"""
                            with base_data as(
                select
             	right(fc.uuid,12) as uuid
                ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.int_vehicle_no) as user_name
                ,if(ISNULL(vim.vin),'其他',vim.vehicle_tested_by) as class_name
                ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.vin) as vin
                ,DATE_FORMAT(FROM_UNIXTIME(fc.data_ts/1000),'%Y-%m-%d')  as date_temp
                ,fc.tid as trigger_id
                ,fc.ota_version as ota_version
                ,fc.package_size as file_size
                from files_completeness fc
                left join vehicle_info_mngt vim  on vim.chery_no = fc.uuid
                ),
                trigger_info as (
                select fc.date_temp,fc.uuid,count(fc.trigger_id) trigger_count from base_data fc
                where
                fc.date_temp >= '{start_date}' and fc.date_temp <= '{end_date}' {vinStr} {triggerIdStr} {otasStr}
                group by fc.date_temp,fc.uuid
                order by fc.date_temp,fc.uuid
                )
                select 
                fc.date_temp
                ,fc.vin
                ,fc.uuid
                ,min(fc.user_name) as user_name
                ,min(fc.class_name) as class_name
                ,if(max(ti.trigger_count) is null,0,round(sum(fc.file_size)/1024/1024,2)) as file_size_MB
                from base_data fc left join trigger_info ti 
                on ti.date_temp=fc.date_temp and ti.uuid=fc.uuid
                where fc.date_temp >= '{start_date}' and fc.date_temp <= '{end_date}' {vinStr} {triggerIdStr} {otasStr}
                group by fc.date_temp,fc.vin,fc.uuid
                order by fc.date_temp
                              """
        result_daily = (session.execute(text(sql_rate_daily)).all())
        result_daily_vin = (session.execute(text(sql_rate_daily_vin)).all())
        result_vin = (session.execute(text(sql_vin)).all())
        result_daily_file_size = (session.execute(text(sql_daily_file_size)).all())
        # Map positional row values to named fields; NULL rates become 0.
        for data in result_daily:
            result["data"]["daily_rate"].append({
                "date_temp": data[0],
                "missing_bytesoup_rate": float(data[1] if data[1] else 0),
                "missing_log_rate": float(data[2] if data[2] else 0),
                "missing_json_rate": float(data[3] if data[3] else 0),
                "missing_mbag_rate": float(data[4] if data[4] else 0),
            })
        for data in result_daily_vin:
            result["data"]["daily_vehicle_rate"].append({
                "date_temp": data[0],
                "user_name": data[1],
                "class_name": data[2],
                "vin": data[3],
                "uuid": data[4],
                "ota_version": data[5],
                "total_missing_rate": float(data[6] if data[6] else 0),
                "total_file_size_MB": float(data[7] if data[7] else 0),
                "missing_bytesoup_rate": float(data[8] if data[8] else 0),
                "missing_log_rate": float(data[9] if data[9] else 0),
                "missing_json_rate": float(data[10] if data[10] else 0),
                "missing_mbag_rate": float(data[11] if data[11] else 0)
            })
        for data in result_vin:
            result["data"]["vehicle"].append({
                "uuid": data[3],
                "vin": data[2],
                "user_name": data[0],
                "class_name": data[1],
            })
        for data in result_daily_file_size:
            result["data"]["file_size"].append({
                "date_temp": data[0],
                "vin": data[1],
                "uuid": data[2],
                "user_name": data[3],
                "class_name": data[4],
                # Falsy (NULL/0) sizes are reported as 0.01 MB.
                "file_size_Mb": float(data[5] if data[5] else 0.01),
            })
        session.commit()
        session.remove()
    except PendingRollbackError:
        # NOTE(review): commit() after a pending rollback will raise again;
        # this likely should be session.rollback(). Confirm intent.
        session.commit()
        session.remove()
    except Exception as e:
        # NOTE(review): errors are swallowed and the (possibly partial)
        # result is still returned with code 200.
        print(e)
    return json.dumps(result, ensure_ascii=False)


def get_file_size_data_integrity(request,session) -> str:
    """Return daily total upload size (GB) and file counts as a JSON string.

    Request JSON: ``start_date``, ``end_date`` plus comma-separated
    ``ota_versions`` / ``vehicles`` / ``triggerId`` filters.  Queries
    ``files_completeness`` restricted to non-ignored trigger ids and groups
    by day.  ``data`` is a list of ``[date, total_size_gb, total_num]``
    string triples.

    NOTE(review): filter values are interpolated into the SQL via f-strings
    — SQL injection risk; should use bound parameters.
    """
    json_params = request.json
    start_date = json_params["start_date"]
    end_date = json_params["end_date"]
    # Comma-separated filter lists; empty entries are dropped below.
    otas = json_params["ota_versions"].split(",")
    vins = json_params["vehicles"].split(",")
    trigger_ids = json_params["triggerId"].split(",")
    triggerIdStr = ""
    tid_list = []
    for tid in trigger_ids:
        if len(tid) > 0:
            tid_list.append(tid)
    if len(tid_list) > 0:
        triggerIdStr = f" and  sd.trigger_id in ('{"','".join(tid_list)}')"

    otasStr = ""
    otasStrArray = []
    for ota in otas:
        if len(ota) > 0:
            otasStrArray.append(ota)
    if len(otasStrArray) > 0:
        otasStr = f" and  sd.ota_version in ('{"','".join(otasStrArray)}')"
    # Does the selection include the unknown version alongside others,
    # or consist of only the unknown version?
    hasUnkownOTA = "未知版本" in otasStrArray and len(otasStrArray) > 1
    onlyUnkownOTA = "未知版本" in otasStrArray and len(otasStrArray) == 1
    OTA_DISN = ["(sd.ota_version='' or sd.ota_version='unknown')",
                "((sd.ota_version_type >= 'OTA5' and sd.ota_version_type like '%OTA%' and sd.ota_win_type >= 'WIN2.0') or (sd.ota_version='' or sd.ota_version='unknown'))",
                "(sd.ota_version_type >= 'OTA5' and sd.ota_version_type like '%OTA%' and sd.ota_win_type >= 'WIN2.0')"]
    OTA_DISN_STR = OTA_DISN[2]
    if onlyUnkownOTA:
        OTA_DISN_STR = OTA_DISN[0]
    if hasUnkownOTA:
        OTA_DISN_STR = OTA_DISN[1]
    # NOTE(review): OTA_DISN_STR is computed but never used below.
    if onlyUnkownOTA or hasUnkownOTA:
        # Plain IN-filter cannot match the "unknown" pseudo-version.
        otasStr = ""
    vins_r = []
    vinStr = ""
    for vin in vins:
        if len(vin) > 0:
            vins_r.append(vin)
    if len(vins_r) > 0:
        vinStr = f" and sd.uuid in ('{"','".join(vins_r)}')"
    # Response payload skeleton.
    result = {
        "code": 200,
        "message": "数据完整性-文件大小，文件数量",
        "data": []
    }
    try:
        sql_query = f"""
                with size_data as (
                    select 
                    vim.vin,
                    right(fc.uuid,12) uuid,
                    fc.ota_version as ota_version,
                    DATE_FORMAT(FROM_UNIXTIME(fc.data_ts/1000),'%Y-%m-%d') as date_temp,
                    fc.package_size,
                    fc.file_nums,
                    fc.tid as trigger_id
                    from files_completeness fc
                    left join trigger_id_info_mngt tiim on tiim.trigger_id = fc.tid
                    left join vehicle_info_mngt vim  on vim.chery_no = fc.uuid
                    where tiim.is_ignore = 0
                    order by date_temp asc
                )
                select 
                sd.date_temp,
                round(sum(sd.package_size)/1024/1024/1024,2) as total_size,
                count(sd.file_nums) as total_num 
                from size_data sd 
                where sd.date_temp >= '{start_date}' and sd.date_temp <= '{end_date}' {vinStr} {triggerIdStr} {otasStr}
                group by sd.date_temp
                order by sd.date_temp"""
        result_data = (session.execute(text(sql_query))
                       .all())
        # Format each row as [date, "x.xx" GB, "n"] strings for the client.
        for data in result_data:
            result["data"].append([data[0], format(data[1], '.2f'), format(data[2], '.0f')])
        session.commit()
        session.remove()
    except PendingRollbackError:
        # NOTE(review): commit() after a pending rollback will raise again;
        # likely should be session.rollback().
        session.commit()
        session.remove()
    except Exception as e:
        # NOTE(review): errors are swallowed; code 200 is still returned.
        print(e)
    return json.dumps(result, ensure_ascii=False)


def data_integrity_hot_map_by_trigger(request,session) -> str:
    """Per-trigger-id daily completeness statistics (heat-map data).

    Request JSON: ``start_date``, ``end_date`` plus comma-separated
    ``ota_versions`` / ``vehicles`` / ``triggerId`` filters.  Groups
    ``files_completeness`` rows (non-ignored trigger ids only) by day and
    trigger id; returns counts, missing rates per category and file sizes
    as a JSON string.

    NOTE(review): filter values are interpolated into the SQL via f-strings
    — SQL injection risk; should use bound parameters.
    """
    json_params = request.json
    start_date = json_params['start_date']
    end_date = json_params['end_date']
    # Comma-separated filter lists; empty entries are dropped below.
    otas = json_params['ota_versions'].split(",")
    vins = json_params['vehicles'].split(",")
    trigger_ids = json_params['triggerId'].split(",")
    triggerIdStr = ""
    tid_list = []
    for tid in trigger_ids:
        if len(tid) > 0:
            tid_list.append(tid)
    if len(tid_list) > 0:
        triggerIdStr = f" and  bd.trigger_id in ('{"','".join(tid_list)}')"

    otasStr = ""
    otasStrArray = []
    for ota in otas:
        if len(ota) > 0:
            otasStrArray.append(ota)
    if len(otasStrArray) > 0:
        otasStr = f" and  bd.ota_version in ('{"','".join(otasStrArray)}')"
    # Does the selection include the unknown version alongside others,
    # or consist of only the unknown version?
    hasUnkownOTA = "未知版本" in otasStrArray and len(otasStrArray) > 1
    onlyUnkownOTA = "未知版本" in otasStrArray and len(otasStrArray) == 1
    OTA_DISN = ["(bd.ota_version='' or bd.ota_version='unknown')",
                "((bd.ota_version_type >= 'OTA5' and bd.ota_version_type like '%OTA%' and bd.ota_win_type >= 'WIN2.0') or (bd.ota_version='' or bd.ota_version='unknown'))",
                "(bd.ota_version_type >= 'OTA5' and bd.ota_version_type like '%OTA%' and bd.ota_win_type >= 'WIN2.0')"]
    OTA_DISN_STR = OTA_DISN[2]
    if onlyUnkownOTA:
        OTA_DISN_STR = OTA_DISN[0]
    if hasUnkownOTA:
        OTA_DISN_STR = OTA_DISN[1]
    # NOTE(review): OTA_DISN_STR is computed but never used below.
    if onlyUnkownOTA or hasUnkownOTA:
        # Plain IN-filter cannot match the "unknown" pseudo-version.
        otasStr = ""
    vins_r = []
    vinStr = ""
    for vin in vins:
        if len(vin) > 0:
            vins_r.append(vin)
    if len(vins_r) > 0:
        vinStr = f" and bd.uuid in ('{"','".join(vins_r)}')"
    # Response payload skeleton.
    result = {
        "code": 200,
        "message": "数据完整性-triggerId维度统计",
        "data": []
    }
    try:
        sql_query = f"""
                with base_data as(
                select
				right(fc.uuid,12) as uuid
                ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.int_vehicle_no) as user_name
                ,if(ISNULL(vim.vin),'其他',vim.vehicle_tested_by) as class_name
                ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.vin) as vin
                ,DATE_FORMAT(FROM_UNIXTIME(fc.data_ts/1000),'%Y-%m-%d')  as date_temp
                ,if(length(fc.missing_categories)>4,1,0) as is_missing
                ,fc.tid as trigger_id
                ,fc.ota_version as ota_version
                ,fc.package_size as file_size
                ,if(LOCATE('bytesoup',fc.missing_categories) > 0,1,0) as missing_bytesoup
                ,if(LOCATE('log',fc.missing_categories) > 0,1,0) as missing_log
                ,if(LOCATE('mbag',fc.missing_categories) > 0,1,0) as missing_mbag
                ,if(LOCATE('json',fc.missing_categories) > 0,1,0) as missing_json
                ,if(LOCATE('bs',fc.need_files) > 0,1,0) as need_bytesoup
                ,if(LOCATE('log',fc.need_files) > 0,1,0) as need_log
                ,if(LOCATE('mbag',fc.need_files) > 0,1,0) as need_mbag
                ,if(LOCATE('json',fc.need_files) > 0,1,0) as need_json
                from files_completeness fc
                left join trigger_id_info_mngt tiim on tiim.trigger_id = fc.tid
                left join vehicle_info_mngt vim  on vim.chery_no = fc.uuid
                where tiim.is_ignore = 0
                )
                select 
                 bd.trigger_id
                ,count(bd.trigger_id) missing_count
                ,bd.date_temp
                ,min(bd.user_name) as user_name
                ,min(bd.class_name) as class_name
                ,min(bd.vin)
                ,min(bd.ota_version) as ota_version
                ,ifnull(round((sum(bd.missing_bytesoup)+sum(bd.missing_log)+sum(bd.missing_json)+sum(bd.missing_mbag))
                /
                (sum(bd.need_bytesoup)+sum(bd.need_log)+sum(bd.need_json)+sum(bd.need_mbag))
                *100,1),0) as total_missing_rate
                ,ifnull(round(sum(bd.file_size)/1024/1024,1),0) as total_file_size_MB
                ,ifnull(round(sum(bd.missing_bytesoup)/sum(bd.need_bytesoup)*100,1),0) missing_bytesoup_rate
                ,ifnull(round(sum(bd.missing_log)/sum(bd.need_log)*100,1),0) missing_log_rate
                ,ifnull(round(sum(bd.missing_json)/sum(bd.need_json)*100,1),0) missing_json_rate
                ,ifnull(round(sum(bd.missing_mbag)/sum(bd.need_mbag)*100,1),0) missing_mbag_rate
                ,ifnull(round(sum(bd.is_missing)/count(bd.trigger_id)*100,2),0) trigger_missing_rate
                ,sum(bd.is_missing) missing_trigger_count
                from base_data bd 
                where bd.date_temp >= '{start_date}' and bd.date_temp <= '{end_date}' {vinStr} {triggerIdStr} {otasStr} 
                group by bd.date_temp,bd.trigger_id
                order by bd.date_temp,bd.trigger_id"""
        result_data = (session.execute(text(sql_query))
                       .all())
        # Map positional row values to named fields.
        for data in result_data:
            result["data"].append({
                "trigger_id":data[0],
                "trigger_count": int(data[1]),
                "date": data[2],
                "user_name": data[3],
                "class_name": data[4],
                "vehicle": data[5],
                "ota_version":data[6],
                "total_missing_rate": float(data[7]),
                "total_file_size_MB": float(data[8]),
                "missing_bytesoup_rate": float(data[9]),
                "missing_log_rate": float(data[10]),
                "missing_json_rate": float(data[11]),
                "missing_mbag_rate": float(data[12]),
                # NOTE(review): the rate (a percentage with 2 decimals) is
                # truncated to int here — confirm this is intended.
                "trigger_missing_rate": int(data[13]),
                "trigger_missing_count": int(data[14]),
            })
        session.commit()
        session.remove()
    except PendingRollbackError:
        # NOTE(review): commit() after a pending rollback will raise again;
        # likely should be session.rollback().
        session.commit()
        session.remove()
    except Exception as e:
        # NOTE(review): errors are swallowed; code 200 is still returned.
        print(e)
    return json.dumps(result, ensure_ascii=False)


def trigger_detail_missing_by_vehicle_detail(request,session) -> str:
    """Per-vehicle missing counts for one trigger id on one day.

    Request JSON: ``date``, ``triggerId`` (single value) and comma-separated
    ``ota_versions`` / ``vehicle`` filters.  Only rows with a non-trivial
    ``missing_categories`` (length > 4) and a non-ignored trigger id are
    counted.  Returns a JSON string.

    NOTE(review): ``trigger_id`` is interpolated unquoted into the SQL
    (``fc.tid={trigger_id}``) — SQL injection risk; should use bound
    parameters.
    """
    json_params = request.json
    date = json_params["date"]
    otas = str2Array(json_params["ota_versions"])
    vehicle = str2Array(json_params["vehicle"])
    trigger_id = json_params["triggerId"]
    # NOTE(review): this first `result` is immediately overwritten below —
    # dead code.
    result = {
        "code": 200,
        "message": "版本号热力图",
        "data": []
    }
    uuidStr = ""
    uuid_array = []
    for uuid in vehicle:
        if len(uuid) > 0:
            uuid_array.append(uuid)
    if len(uuid_array) > 0:
        uuidStr = f" and bd.uuid in ('{"','".join(uuid_array)}')"
    otasStr = ""
    otasStrArray = []
    for ota in otas:
        if len(ota) > 0:
            otasStrArray.append(ota)
    if len(otasStrArray) > 0:
        otasStr = f" and  bd.ota_version in ('{"','".join(otasStrArray)}')"
    # Response payload skeleton.
    result = {
        "code": 200,
        "message": "trigger",
        "data": []
    }
    sql_ = f"""
            with base_data as(
                select
				right(fc.uuid,12) as uuid
                ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.vin) as vin
                ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.int_vehicle_no) as user_name
                ,DATE_FORMAT(FROM_UNIXTIME(fc.data_ts/1000),'%Y-%m-%d')  as date_temp
                ,if(length(fc.missing_categories)>4,1,0) as is_missing
                ,fc.tid as trigger_id
                ,fc.ota_version as ota_version
                from files_completeness fc
                left join trigger_id_info_mngt tiim on tiim.trigger_id = fc.tid
                left join vehicle_info_mngt vim  on vim.chery_no = fc.uuid
                where tiim.is_ignore = 0 and length(fc.missing_categories)>4 and fc.tid={trigger_id}
                )
                select 
                bd.date_temp
                ,bd.uuid
                ,min(bd.vin)
                ,min(bd.user_name) as user_name
                ,sum(bd.is_missing) missing_trigger_count
                from base_data bd 
                where bd.date_temp='{date}' {otasStr} {uuidStr}
                group by bd.date_temp,bd.uuid
                order by bd.date_temp,bd.uuid
        """
    # print(sql_)
    try:
        result_data = (session.execute(text(sql_)).all())
        data_list = []
        # Map positional row values to named fields.
        for data in result_data:
            data_list.append(
                {
                    "trigger_id": trigger_id,
                    "times": int(data[4]),
                    "vehicle_vin": data[2],
                    "vehicle_uuid": data[1],
                    "vehicle_name": data[3],
                }
            )
        result["data"] = data_list
        session.commit()
        session.remove()
    except PendingRollbackError:
        # NOTE(review): commit() after a pending rollback will raise again;
        # likely should be session.rollback().
        session.commit()
        session.remove()
    except Exception as e:
        # NOTE(review): errors are swallowed; code 200 is still returned.
        print(e)
    return json.dumps(result, ensure_ascii=False)


def data_integrity_hot_map_by_vehicle(request,session) -> str:
    """Per-vehicle daily completeness statistics (heat-map data).

    Request JSON: ``start_date``, ``end_date`` plus comma-separated
    ``ota_versions`` / ``vehicles`` / ``triggerId`` filters.  Groups
    ``files_completeness`` rows by day and vehicle uuid (unlike the
    by-trigger variant, ignored trigger ids are NOT filtered out here) and
    returns counts, per-category missing rates and file sizes as JSON.

    NOTE(review): filter values are interpolated into the SQL via f-strings
    — SQL injection risk; should use bound parameters.
    """
    json_params = request.json
    start_date = json_params['start_date']
    end_date = json_params['end_date']
    # Comma-separated filter lists; empty entries are dropped below.
    otas = json_params['ota_versions'].split(",")
    uuids = json_params['vehicles'].split(",")
    trigger_ids = json_params['triggerId'].split(",")
    triggerIdStr = ""
    tid_list = []
    for tid in trigger_ids:
        if len(tid) > 0:
            tid_list.append(tid)
    if len(tid_list) > 0:
        triggerIdStr = f" and  bd.trigger_id in ('{"','".join(tid_list)}')"
    otasStr = ""
    otasStrArray = []
    for ota in otas:
        if len(ota) > 0:
            otasStrArray.append(ota)
    if len(otasStrArray) > 0:
        otasStr = f" and  bd.ota_version in ('{"','".join(otasStrArray)}')"
    # Does the selection include the unknown version alongside others,
    # or consist of only the unknown version?
    hasUnkownOTA = "未知版本" in otasStrArray and len(otasStrArray) > 1
    onlyUnkownOTA = "未知版本" in otasStrArray and len(otasStrArray) == 1
    OTA_DISN = ["(bd.ota_version='' or bd.ota_version='unknown')",
                "((bd.ota_version_type >= 'OTA5' and bd.ota_version_type like '%OTA%' and bd.ota_win_type >= 'WIN2.0') or (bd.ota_version='' or bd.ota_version='unknown'))",
                "(bd.ota_version_type >= 'OTA5' and bd.ota_version_type like '%OTA%' and bd.ota_win_type >= 'WIN2.0')"]
    OTA_DISN_STR = OTA_DISN[2]
    if onlyUnkownOTA:
        OTA_DISN_STR = OTA_DISN[0]
    if hasUnkownOTA:
        OTA_DISN_STR = OTA_DISN[1]
    # NOTE(review): OTA_DISN_STR is computed but never used below.
    if onlyUnkownOTA or hasUnkownOTA:
        # Plain IN-filter cannot match the "unknown" pseudo-version.
        otasStr = ""
    vins_r = []
    uuidStr = ""
    for uuid in uuids:
        if len(uuid) > 0:
            vins_r.append(uuid)
    if len(vins_r) > 0:
        uuidStr = f" and bd.uuid in ('{"','".join(vins_r)}')"
    # Response payload skeleton.
    result = {
        "code": 200,
        "message": "数据完整性-triggerId维度统计",
        "data": []
    }
    try:
        sql_query = f"""
                with base_data as(
                select
				right(fc.uuid,12) as uuid
                ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.int_vehicle_no) as user_name
                ,if(ISNULL(vim.vin),'其他',vim.vehicle_tested_by) as class_name
                ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.vin) as vin
                ,DATE_FORMAT(FROM_UNIXTIME(fc.data_ts/1000),'%Y-%m-%d')  as date_temp
                ,if(length(fc.missing_categories)>4,1,0) as is_missing
                ,fc.tid as trigger_id
                ,fc.ota_version as ota_version
                ,fc.package_size as file_size
                ,if(LOCATE('bytesoup',fc.missing_categories) > 0,1,0) as missing_bytesoup
                ,if(LOCATE('log',fc.missing_categories) > 0,1,0) as missing_log
                ,if(LOCATE('mbag',fc.missing_categories) > 0,1,0) as missing_mbag
                ,if(LOCATE('json',fc.missing_categories) > 0,1,0) as missing_json
                ,if(LOCATE('bs',fc.need_files) > 0,1,0) as need_bytesoup
                ,if(LOCATE('log',fc.need_files) > 0,1,0) as need_log
                ,if(LOCATE('mbag',fc.need_files) > 0,1,0) as need_mbag
                ,if(LOCATE('json',fc.need_files) > 0,1,0) as need_json
                from files_completeness fc
                left join vehicle_info_mngt vim  on vim.chery_no = fc.uuid
                )
                select 
                bd.uuid,
                count(bd.trigger_id) trigger_count,
                bd.date_temp
                ,min(bd.user_name) as user_name
                ,min(bd.class_name) as class_name
                ,min(bd.vin)
                ,min(bd.ota_version) as ota_version
                ,ifnull(round((sum(bd.missing_bytesoup)+sum(bd.missing_log)+sum(bd.missing_json)+sum(bd.missing_mbag))
                /
                (sum(bd.need_bytesoup)+sum(bd.need_log)+sum(bd.need_json)+sum(bd.need_mbag))
                *100,1),0) as total_missing_rate
                ,ifnull(round(sum(bd.file_size)/1024/1024,1),0) as total_file_size_MB
                ,ifnull(round(sum(bd.missing_bytesoup)/sum(bd.need_bytesoup)*100,1),0) missing_bytesoup_rate
                ,ifnull(round(sum(bd.missing_log)/sum(bd.need_log)*100,1),0) missing_log_rate
                ,ifnull(round(sum(bd.missing_json)/sum(bd.need_json)*100,1),0) missing_json_rate
                ,ifnull(round(sum(bd.missing_mbag)/sum(bd.need_mbag)*100,1),0) missing_mbag_rate
                ,ifnull(round(sum(bd.is_missing)/count(bd.trigger_id)*100,2),0) trigger_missing_rate
                ,sum(bd.is_missing) missing_trigger_count
                from base_data bd
                where bd.date_temp >= '{start_date}' and bd.date_temp <= '{end_date}' {otasStr} {uuidStr} {triggerIdStr}
                group by bd.date_temp,bd.uuid
                order by bd.date_temp,bd.uuid"""
        result_data = (session.execute(text(sql_query))
                       .all())
        # Map positional row values to named fields.
        for data in result_data:
            result["data"].append({
                "uuid": data[0],
                "trigger_count": int(data[1]),
                "date": data[2],
                "user_name": data[3],
                "class_name": data[4],
                "vehicle": data[5],
                "ota_version": data[6],
                "total_missing_rate": float(data[7]),
                "total_file_size_MB": float(data[8]),
                "missing_bytesoup_rate": float(data[9]),
                "missing_log_rate": float(data[10]),
                "missing_json_rate": float(data[11]),
                "missing_mbag_rate": float(data[12]),
                # NOTE(review): the rate (a percentage with 2 decimals) is
                # truncated to int here — confirm this is intended.
                "trigger_missing_rate": int(data[13]),
                "trigger_missing_count": int(data[14]),
            })
        session.commit()
        session.remove()
    except PendingRollbackError:
        # NOTE(review): commit() after a pending rollback will raise again;
        # likely should be session.rollback().
        session.commit()
        session.remove()
    except Exception as e:
        # NOTE(review): errors are swallowed; code 200 is still returned.
        print(e)
    return json.dumps(result, ensure_ascii=False)


def data_integrity_hot_map_by_vehicle_detail(request,session) -> str:
    """Per-trigger missing counts for one vehicle on one day.

    Request JSON: ``date``, ``vehicle`` (single uuid suffix) and
    comma-separated ``ota_versions`` / ``triggerId`` filters.  Only rows
    with a non-trivial ``missing_categories`` (length > 4) and non-ignored
    trigger ids are counted.  Returns a JSON string.

    NOTE(review): filter values (including ``vehicle`` and ``date``) are
    interpolated into the SQL via f-strings — SQL injection risk; should
    use bound parameters.
    """
    json_params = request.json
    date = json_params["date"]
    vehicle = json_params["vehicle"]
    otas = str2Array(json_params["ota_versions"])
    trigger_ids = json_params['triggerId'].split(",")
    # NOTE(review): this first `result` is immediately overwritten below —
    # dead code.
    result = {
        "code": 200,
        "message": "版本号热力图",
        "data": []
    }
    triggerIdStr = ""
    tid_list = []
    for tid in trigger_ids:
        if len(tid) > 0:
            tid_list.append(tid)
    if len(tid_list) > 0:
        triggerIdStr = f" and fc.tid in ('{"','".join(tid_list)}')"
    otasStr = ""
    otasStrArray = []
    for ota in otas:
        if len(ota) > 0:
            otasStrArray.append(ota)
    if len(otasStrArray) > 0:
        otasStr = f" and  bd.ota_version in ('{"','".join(otasStrArray)}')"
    # Response payload skeleton.
    result = {
        "code": 200,
        "message": "trigger",
        "data": []
    }
    sql_ = f"""
                with base_data as(
                    select
    				right(fc.uuid,12) as uuid
                    ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.vin) as vin
                    ,if(ISNULL(vim.vin),right(fc.uuid,12),vim.int_vehicle_no) as user_name
                    ,DATE_FORMAT(FROM_UNIXTIME(fc.data_ts/1000),'%Y-%m-%d')  as date_temp
                    ,if(length(fc.missing_categories)>4,1,0) as is_missing
                    ,fc.tid as trigger_id
                    ,fc.ota_version as ota_version
                    from files_completeness fc
                    left join trigger_id_info_mngt tiim on tiim.trigger_id = fc.tid
                    left join vehicle_info_mngt vim  on vim.chery_no = fc.uuid
                    where tiim.is_ignore = 0 and length(fc.missing_categories)>4 and right(fc.uuid,12)='{vehicle}' {triggerIdStr}
                    )
                    select 
                    bd.trigger_id
                    ,min(bd.vin)
                    ,min(bd.user_name) as user_name
                    ,sum(bd.is_missing) missing_trigger_count
                    from base_data bd 
                    where  bd.date_temp='{date}' {otasStr}
                    group by bd.date_temp,bd.trigger_id
                    order by bd.date_temp,bd.trigger_id
            """
    try:
        result_data = (session.execute(text(sql_)).all())
        data_list = []
        # Map positional row values to named fields.
        for data in result_data:
            data_list.append(
                {
                    "trigger_id": data[0],
                    "vehicle_uuid": vehicle,
                    "vehicle_vin": data[1],
                    "vehicle_name": data[2],
                    "times": int(data[3]),
                }
            )
        result["data"] = data_list
        session.commit()
        session.remove()
    except PendingRollbackError:
        # NOTE(review): commit() after a pending rollback will raise again;
        # likely should be session.rollback().
        session.commit()
        session.remove()
    except Exception as e:
        # NOTE(review): errors are swallowed; code 200 is still returned.
        print(e)
    return json.dumps(result, ensure_ascii=False)