import json
from sqlalchemy import text
from sqlalchemy.exc import PendingRollbackError


def _sql_escape(value):
    """Escape embedded single quotes so *value* can be placed inside a SQL
    string literal (MySQL escapes a quote by doubling it).

    Last-line defence against SQL injection for the hand-built queries below;
    a proper fix would use bound parameters, but these are multi-table
    f-string reports, so we at least neutralise quote characters.
    """
    return str(value).replace("'", "''")


def _in_clause(column, values):
    """Return an `` and <column> in ('v1','v2',...)`` WHERE fragment.

    Returns '' when *values* is empty so callers can interpolate the result
    unconditionally.  Every value is quoted and escaped via ``_sql_escape``.
    """
    if not values:
        return ""
    joined = "','".join(_sql_escape(v) for v in values)
    return f" and {column} in ('{joined}')"


def _scalar(rows, cast):
    """First column of the first result row passed through *cast*; 0 for NULL."""
    value = rows[0][0]
    return cast(value if value is not None else 0)


def homepage(request, session):
    """Assemble the homepage dashboard statistics and return them as JSON.

    Filters are read from ``request.json``:
      * ``start_date`` / ``end_date``: inclusive 'YYYY-MM-DD' window bounds.
      * ``triggerId``: comma-separated trigger ids; '' disables the filter.
      * ``ota_versions``: comma-separated OTA versions; '' disables the filter.

    *session* is a SQLAlchemy scoped session.  All statistics are gathered in
    one try block; on failure the session is rolled back and the (possibly
    partial) payload is still returned, mirroring the original best-effort
    behaviour.

    Returns:
        str: JSON document (``ensure_ascii=False``) with the dashboard data.
    """
    json_param = request.json
    # User input is interpolated into raw SQL below -- escape it up front.
    start_date = _sql_escape(json_param["start_date"])
    end_date = _sql_escape(json_param["end_date"])
    # split(',') on '' yields [''], so drop blank entries from both lists.
    triggerIdList = [tid for tid in json_param["triggerId"].split(",") if tid]
    otasStrArray = [ota for ota in json_param["ota_versions"].split(",") if ota]

    # Optional WHERE fragments ('' when the matching filter list is empty).
    where_ten = _in_clause("dt.trigger_id", triggerIdList)
    otasStr = _in_clause("dt.ota_version", otasStrArray)
    trigger_sql_str_version_total = _in_clause("trigger_id", triggerIdList)
    # The original emitted these ids unquoted -- quoting matches every other
    # IN-clause here and is injection-safe (MySQL coerces '100032' = 100032
    # when fc.tid is a numeric column).
    miss_rate_trigger_where = _in_clause("fc.tid", triggerIdList)
    ota_for_upcloud = _in_clause("ekt.vs", otasStrArray)

    result = {
        "code": 200,
        "message": "主页数据",
        "data": {
            "tirgger_id_top_five": [],
            "total_vehicle_cnt": 0,
            "latest_version": {},
            "vehicle_trigger_id_top_ten": [],
            "daily_version_ratio": [],
            "versions_cnt_top": []
        }
    }
    try:
        # Vehicles with the most trigger firings in the window.
        # NOTE(review): named "top ten" but the query keeps LIMIT 5 -- this
        # matches the original behaviour; confirm intent with the author.
        top_ten_sql = f"""
                    with data_temp as(
                    select 
                    vim.vin,
                    vim.int_vehicle_no ,
                    brd.trigger_id,
                    right(brd.uuid,12) uuid,
                    brd.ota_version as ota_version,
                    DATE_FORMAT(FROM_UNIXTIME(brd.data_ts/1000),'%Y-%m-%d')  as date_temp
                    from beta_raw_data brd left join vehicle_info_mngt vim on vim.chery_no = brd.uuid
                    )
                    select max(dt.vin) vin,dt.int_vehicle_no ,count(dt.trigger_id) count_trigger,dt.uuid
                    from data_temp dt
                    where dt.date_temp >= '{start_date}' and dt.date_temp <= '{end_date}' {where_ten} {otasStr}
                    group by dt.uuid,dt.int_vehicle_no
                    order by count(dt.trigger_id) desc
                    limit 5
                """
        vehicle_trigger_id_top_ten = session.execute(text(top_ten_sql)).all()
        for data in vehicle_trigger_id_top_ten:
            result["data"]["vehicle_trigger_id_top_ten"].append(
                {"vin": data[0], "vehicle": data[1], "count": data[2], "uuid": data[3]})

        # Count of distinct OTA versions seen in the window.
        version_total_sql = f"""
                    with data_temp as(
                    select 
                    brd.trigger_id,
                    brd.ota_version as ota_version,
                    DATE_FORMAT(FROM_UNIXTIME(brd.data_ts/1000),'%Y-%m-%d')  as date_temp 
                    from beta_raw_data brd
                    )
                    select
                    left(right(dt.ota_version, 20), 17) all_version
                    from data_temp dt
                    where dt.date_temp >= '{start_date}' and dt.date_temp <= '{end_date}' {trigger_sql_str_version_total}  {otasStr}
                    group by dt.ota_version
                        """
        version_total_result = session.execute(text(version_total_sql)).all()
        result["data"]["version_total"] = len(version_total_result)

        # Count of distinct vehicles (uuid) seen in the window.
        vehicle_total_sql = f"""
                            with data_temp as(
                            select 
                            brd.ota_version as ota_version,
                            DATE_FORMAT(FROM_UNIXTIME(brd.data_ts/1000),'%Y-%m-%d') as date_temp ,
                            brd.trigger_id,
                            brd.uuid
                            from beta_raw_data brd
                            )
                            select
                            dt.uuid
                            from data_temp dt
                            where dt.date_temp >= '{start_date}' and dt.date_temp <= '{end_date}' {trigger_sql_str_version_total}  {otasStr}
                            group by dt.uuid
                        """
        vehicle_total_result = session.execute(text(vehicle_total_sql)).all()
        result["data"]["total_vehicle_cnt"] = len(vehicle_total_result)

        # Five most frequent trigger ids (ignored ids excluded).
        trigger_count_top5_sql = f"""
                with data_temp as(
                select 
                tiim.retrun_describe,
                tiim.involve_function,
                brd.ota_version as ota_version,
                brd.trigger_id,
                tiim.retrun_describe_EN,
                DATE_FORMAT(FROM_UNIXTIME(brd.data_ts/1000),'%Y-%m-%d')  as date_temp 
                from beta_raw_data brd left join trigger_id_info_mngt tiim on tiim.trigger_id = brd.trigger_id
                where tiim.is_ignore = 0
                )
                select 
                dt.trigger_id,
                dt.retrun_describe,
                count(dt.trigger_id) counts,
                dt.involve_function,
                dt.retrun_describe_EN
                from data_temp dt
                where dt.date_temp >= '{start_date}' and dt.date_temp <= '{end_date}' {trigger_sql_str_version_total} {otasStr}
                group by dt.trigger_id
                order by count(dt.trigger_id) desc
                limit 5
        """
        trigger_count_top5_result = session.execute(text(trigger_count_top5_sql)).all()
        for data in trigger_count_top5_result:
            result["data"]["tirgger_id_top_five"].append(
                {"trigger_id": data[0], "trigger_id_describe": data[1], "involve_function": data[3],
                 "count": data[2], "trigger_id_describe_en": data[4]})

        # Total number of trigger firings in the window.
        trigger_id_total_sql = f"""
                    with data_temp as(
                    select 
                    tiim.retrun_describe,
                    brd.ota_version,
                    brd.trigger_id,
                    DATE_FORMAT(FROM_UNIXTIME(brd.data_ts/1000),'%Y-%m-%d')  as date_temp 
                    from beta_raw_data brd left join trigger_id_info_mngt tiim on tiim.trigger_id = brd.trigger_id
                    where tiim.is_ignore = 0
                    )
                    select 
                    count(dt.trigger_id) times
                    from data_temp dt
                    where dt.date_temp >= '{start_date}' and dt.date_temp <= '{end_date}' {trigger_sql_str_version_total}  {otasStr}
            """
        trigger_id_total_sql_result = session.execute(text(trigger_id_total_sql)).all()
        result["data"]["trigger_id_total"] = _scalar(trigger_id_total_sql_result, int)

        # Overall file-missing rates per category (bytesoup/log/json/mbag).
        data_missing_total_sql = f"""
                with base_data as(
                select
                vim.int_vehicle_no as user_name
                ,vim.vehicle_tested_by as class_name
                ,vim.vin
                ,DATE_FORMAT(FROM_UNIXTIME(fc.data_ts/1000),'%Y-%m-%d')  as date_temp
                ,fc.tid as trigger_id
                ,fc.ota_version
                ,fc.package_size/1024/1024 as file_size_MB
                ,if(LOCATE('bytesoup',fc.missing_categories) > 0,1,0) as missing_bytesoup
                ,if(LOCATE('log',fc.missing_categories) > 0,1,0) as missing_log
                ,if(LOCATE('mbag',fc.missing_categories) > 0,1,0) as missing_mbag
                ,if(LOCATE('json',fc.missing_categories) > 0,1,0) as missing_json
                ,if(LOCATE('bs',fc.need_files) > 0,1,0) as need_bytesoup
                ,if(LOCATE('log',fc.need_files) > 0,1,0) as need_log
                ,if(LOCATE('mbag',fc.need_files) > 0,1,0) as need_mbag
                ,if(LOCATE('json',fc.need_files) > 0,1,0) as need_json
                from files_completeness fc left join vehicle_info_mngt vim  on vim.chery_no = fc.uuid
                where fc.is_ignored_tid = 0 {miss_rate_trigger_where}
                )
                select 
                round(sum(dt.missing_bytesoup)/if(sum(dt.need_bytesoup)=0,1,sum(dt.need_bytesoup))*100,1) missing_bytesoup_rate
                ,round(sum(dt.missing_log)/if(sum(dt.need_log)=0,1,sum(dt.need_log))*100,1) missing_log_rate
                ,round(sum(dt.missing_json)/if(sum(dt.need_json)=0,1,sum(dt.need_json))*100,1) missing_json_rate
                ,round(sum(dt.missing_mbag)/if(sum(dt.need_mbag)=0,1,sum(dt.need_mbag))*100,1) missing_mbag_rate
                from base_data dt
                where dt.date_temp >= '{start_date}' and dt.date_temp <= '{end_date}' {otasStr}
                order by dt.date_temp"""
        data_missing_total_result = session.execute(text(data_missing_total_sql)).all()
        # Key order matches the SELECT column order above.
        miss_row = data_missing_total_result[0]
        result["data"]["data_missing_total"] = {
            key: float(val if val is not None else 0)
            for key, val in zip(
                ("missing_bytesoup_rate", "missing_log_rate",
                 "missing_json_rate", "missing_mbag_rate"),
                miss_row,
            )
        }

        # Total uploaded package size in MB.
        file_size_sql = f"""
            with base_data as(
            select
            DATE_FORMAT(FROM_UNIXTIME(fc.data_ts/1000),'%Y-%m-%d')  as date_temp
            ,fc.tid as trigger_id
            ,fc.package_size/1024/1024 as file_size_MB,
            fc.ota_version
            from files_completeness fc
            left join trigger_id_info_mngt tiim on tiim.trigger_id = fc.tid
            left join vehicle_info_mngt vim  on vim.chery_no = fc.uuid
            where tiim.is_ignore = 0
            )
            select round(sum(dt.file_size_MB),2) from base_data dt
            where dt.date_temp >= '{start_date}' and dt.date_temp <= '{end_date}' {trigger_sql_str_version_total} {otasStr}
        """
        file_size_result = session.execute(text(file_size_sql)).all()
        result["data"]["file_size_total"] = _scalar(file_size_result, float)

        # Average parse speed from the pre-aggregated table.
        speed_sql = f"""
         select sum(sum_speed/1024)/sum(data_count) as speed_avg
           from parse_api_speed_agg dt
          where date >= '{start_date}' and date <= '{end_date}'
         """
        speed_result = session.execute(text(speed_sql)).all()
        result["data"]["speed_avg"] = _scalar(speed_result, float)

        # Upload-success ratio: tid 100032 (success) over 100032+100033+100030.
        upcloud_sql = f"""
            select round(sum(res.top_)/(sum(res.top_)+sum(res.bottom_1)+sum(res.bottom_2))*100,1) rate_
            from
            (
            select if(dt.tid =100032,count(dt.tid),0) top_,
            if(dt.tid = 100033,count(dt.tid),0) bottom_1,
            if(dt.tid = 100030,count(dt.tid),0) bottom_2 
            from (
            (select 
            DATE_FORMAT(FROM_UNIXTIME(ekt.ts/1000),'%Y-%m-%d')  as date_temp
            ,ekt.tid tid from explor_kpi_tid_100032 ekt
            where 1=1 {ota_for_upcloud})
            union all
            (select 
            DATE_FORMAT(FROM_UNIXTIME(ekt.ts/1000),'%Y-%m-%d')  as date_temp
            ,ekt.tid tid from explor_kpi_tid_100033 ekt
            where  1=1 {ota_for_upcloud})
            union all
            (select 
            DATE_FORMAT(FROM_UNIXTIME(ekt.ts/1000),'%Y-%m-%d')  as date_temp
            ,ekt.tid tid from explor_kpi_tid_100030 ekt
            where  1=1 {ota_for_upcloud})
            ) dt
            where dt.date_temp >= '{start_date}' and dt.date_temp <= '{end_date}'
            group by dt.tid
            ) res"""
        upcloud_result = session.execute(text(upcloud_sql)).all()
        result["data"]["upcloud_total"] = _scalar(upcloud_result, float)

        # Count of tid 100031 events in the window.
        sql_itc = f"""
               with data_temp as (
                select DATE_FORMAT(FROM_UNIXTIME(ekt.ts/1000),'%Y-%m-%d')  as date_temp,
                ekt.tid from explor_kpi_tid_100031 ekt
                where  1=1 {ota_for_upcloud}
                )
                select count(dt.tid) as tri_count 
                from data_temp dt
                where dt.date_temp >= '{start_date}' and dt.date_temp <= '{end_date}'
"""
        itc_result = session.execute(text(sql_itc)).all()
        result["data"]["itc_total"] = _scalar(itc_result, float)

        session.commit()
        session.remove()
    except PendingRollbackError:
        # A rollback is pending: committing (as the old code did) would just
        # re-raise.  Roll back so the scoped session is usable again.
        session.rollback()
        session.remove()
    except Exception as e:
        # Best-effort endpoint: log, clean up the session, and return whatever
        # was collected so far (the old code left the session dirty here).
        print(e)
        session.rollback()
        session.remove()
    return json.dumps(result, ensure_ascii=False)