package org.example.dev;


import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.example.develop.jxxxOrderGroupUDF;
import org.example.utils.MysqlUtils_3;
import org.example.utils.PrepareDimTable;

import java.io.IOException;

import static org.apache.spark.sql.functions.*;

/**
 * Locomotive base-information utilities: enriches TCMS real-time data and
 * provides the common processing steps for model (diagnosis) data.
 *
 * @author tiancy
 * @version 1.0
 * @since 2023/9/15
 */
public class GeneralDataUtils {

    /**
     * Builds the locomotive base-attribute wide table by joining the allocation
     * dictionary (ps), the configuration dictionary (gxzd), the locomotive
     * service-history records (jcll), three cumulative-mileage views (zx1..zx3)
     * and the merged overhaul info (jxxx), then registers and caches the result
     * as the temp view {@code ps_gxzz_jcll_zx_jxxx_all_dim}.
     *
     * @param spark      active SparkSession used to run all SQL statements
     * @param sqlContext SQLContext used only to cache the final temp table
     * @param cxIn       locomotive-type filter fragment, e.g. {@code in('HXD2','HXD1D','HXD3')}
     * @param bjpjmIn    component PJM filter fragment (the component's single-item code,
     *                   e.g. traction converter PJ00003); may also be a set.
     *                   Example: getTrainAttributesDim(spark, sqlContext, "in ('HXD2','HXD1D','HXD3') ", "in ('PJ00003') ")
     * @throws IOException propagated from the dim-table / MySQL helper utilities
     */
    public static void getTrainAttributesDim(SparkSession spark, SQLContext sqlContext, String cxIn, String bjpjmIn) throws IOException {

        // Register the UDF that merges the component-overhaul info coming from repair task orders.
        spark.udf().register("jxxxOrderGroupUDF", new jxxxOrderGroupUDF(), DataTypes.StringType);
        /*
            The allocation dictionary stores locomotive-type codes plus manufacturing and
            allocation data. Join its type code against the type dictionary to resolve the
            type name, join the power-type dictionary for the power type, and join the
            railway-bureau / depot dictionaries for their short names.
            Join keys: pszd.cxid = cxzd.cxdm   cxzd.dllx = dllx.jxdllxdm   pszd.LJ_ID = LJZD.ljdm   pszd.psd_id = jwdzd.jwddm
            Final result: allocation info, locomotive type, power type, bureau and depot info:
            s_train_id,cx,cx_id,ch,lj_id,lj,ljjc,psd_id,psd,psdjc,yyd_id,yyd,yydjc,zzc,zzrq,psrq,jcdllxmc,ljpxm,s_train_type
         */
        PrepareDimTable.ps_cx_dllx_lj_psd(spark);
        spark.sql("select s_train_id,cx,cx_id,ch,lj_id,lj,ljjc,psd_id,psd,psdjc,yyd_id,yyd,yydjc,zzc,zzrq,psrq,jcdllxmc,ljpxm,s_train_type from ps_cx_dllx_lj_jwd")
                .filter(" cx " + cxIn + " ").createOrReplaceTempView("ps");
        System.out.println("==== ps");
        spark.sql("select * from ps order by s_train_id").show(50, false);

        // Fetch the service-history (resume) records for all locomotives; keep only the
        // latest record per (cxm, ch, pjflbm, wz) via row_number over changetime desc.
        MysqlUtils_3.readMysqlToTempTable(spark, "dim_jmis_jcda_jcll_1");
        String sql =
                "select cxm,ch,s_train_id,gxmc,wz,pjflbm,pjxlh,pjdjm,zzrq,sccj,gzxh,changetime\n"
                        + "from (\n"
                        + "    select cxm, ch, gxmc, pjflbm, wz, pjxlh, pjdjm, concat(cxm, '-', ch) as s_train_id, zzrq, sccj, gzxh,\n"
                        + "           changetime, row_number() over (partition by cxm, ch, pjflbm, wz order by changetime desc) as rank_jcll\n"
                        + "    from dim_jmis_jcda_jcll_1\n"
                        + "    where cxm " + cxIn + "\n"
                        + "      and pjflbm " + bjpjmIn + "\n"
                        + ") as jcll_temp\n"
                        + "where rank_jcll = 1";
        spark.sql(sql).createOrReplaceTempView("jcll");
        System.out.println("====== jcll");
        spark.sql("select * from jcll order by s_train_id").show(50, false);


        // Load the configuration-dictionary data.
        MysqlUtils_3.readMysqlToTempTable(spark, "dim_jmis_t_jcbm_jcgxzd");
        // From the model-detail table fetch this component's system info: system name, system code, system PJM.
        MysqlUtils_3.readMysqlToTempTable(spark, "t_phm_mxxq");
        spark.sql("select sycx,ssxt, xtbm,xtpjm, flmc, fljc, flbm, lbjbm, wzmc\n" +
                        "from (\n" +
                        "   select sycx, flmc, fljc, flbm, lbjbm, wzmc\n" +
                        "   from dim_jmis_t_jcbm_jcgxzd\n" +
                        "   where sycx  " + cxIn + " and lbjbm " + bjpjmIn + "\n" +
                        ") gx\n" +
                        "join (select distinct syjx,ssxt,xtbm,xtpjm,bjpjm from t_phm_mxxq where syjx " + cxIn + " and bjpjm " + bjpjmIn + ") mx on gx.sycx = mx.syjx and gx.lbjbm = mx.bjpjm")
                .createOrReplaceTempView("gxzd");
        System.out.println("==== gxzd");
        spark.sql("select * from gxzd").show(20, false);

        MysqlUtils_3.readMysqlToTempTable(spark, "dim_jmis_jcda_t_jxglzxgl_v");
        MysqlUtils_3.readMysqlToTempTable(spark, "dim_jmis_jxgl_t_jxgl_jxdt");
        MysqlUtils_3.readMysqlToTempTable(spark, "dim_jmis_jcda_zxgl_day");
        MysqlUtils_3.readMysqlToTempTable(spark, "dim_jmis_t_jcbm_zjxc");
        // Build the three cumulative-mileage views: zx1 (total), zx2 (since last C4/C5/C6 overhaul), zx3 (since inspection date).
        spark.sql("select concat(cx,'-',ch) as S_TRAIN_ID,round(LJZX) as LJZX from dim_jmis_jcda_t_jxglzxgl_v where cx " + cxIn + "").createOrReplaceTempView("zx1");
        spark.sql("" +
                "select S_TRAIN_ID, ROUND(SUM(ZXGL_DAY), 0) AS LJZX\n" +
                "from (\n" +
                "       SELECT CONCAT(cxjc, '-', ch) AS S_TRAIN_ID, MAX(JGRQ) AS JXRQ\n" +
                "       FROM dim_jmis_jxgl_t_jxgl_jxdt\n" +
                "       WHERE RC IN ('C4修', 'C5修', 'C6修')\n" +
                "       GROUP BY cxjc, ch\n" +
                ") jxdt_mid\n" +
                "LEFT JOIN dim_jmis_jcda_zxgl_day zx  ON jxdt_mid.S_TRAIN_ID = CONCAT(zx.CX, '-', zx.CH) AND zx.REPOT_DATE > jxdt_mid.JXRQ\n" +
                "GROUP BY S_TRAIN_ID").createOrReplaceTempView("zx2");

        spark.sql("" +
                "SELECT S_TRAIN_ID, ROUND(SUM(ZXGL_DAY), 0) AS LJZX\n" +
                "FROM (\n" +
                "       SELECT CONCAT(zjxc.cxjc, '-', zjxc.ch) AS S_TRAIN_ID, zx.ZXGL_DAY AS ZXGL_DAY\n" +
                "       FROM dim_jmis_t_jcbm_zjxc zjxc LEFT JOIN dim_jmis_jcda_zxgl_day zx ON CONCAT(zx.CX, '-', zx.CH) = CONCAT(zjxc.cxjc, '-', zjxc.ch) AND zx.REPOT_DATE > zjxc.KCRQ\n" +
                "     ) A\n" +
                "GROUP BY S_TRAIN_ID").filter("ljzx is not null").createOrReplaceTempView("zx3");

        // Join ps + gxzd + jcll + zx1/zx2/zx3. Resulting columns:
        //s_train_id,cx,cx_id,ch,lj_id,lj,ljjc,psd_id,psd,psdjc,yyd_id,yyd,yydjc,zzc,zzrq,psrq,jcdllxmc,ljpxm,s_train_type,
        //ssxt,xtbm,xtpjm,flmc,fljc,flbm,lbjbm,wzmc,
        //gxmc,wz,pjflbm,pjxlh,pjdjm,zzrq,sccj,gzxh,changetime,
        //ljzxgl,gjxhzx,zjxhzx
        spark.sql("" +
                "select ps.s_train_id,ps.cx,ps.cx_id,ps.ch,ps.lj_id,ps.lj,ps.ljjc,ps.psd_id,ps.psd,ps.psdjc,ps.yyd_id,ps.yyd,ps.yydjc,ps.zzc,ps.zzrq as zzrq_ps,ps.psrq,ps.jcdllxmc,ps.ljpxm,ps.s_train_type,\n" +
                // lbjbm and pjflbm carry the same value, e.g. PJ00003
                "gxzd.ssxt, gxzd.xtbm, gxzd.xtpjm, gxzd.flmc, gxzd.fljc, gxzd.flbm, gxzd.lbjbm,jcll.pjflbm,gxzd.wzmc,\n" +
                "jcll.gxmc,jcll.wz,jcll.pjdjm,jcll.pjxlh,jcll.zzrq as zzrq_jcll,jcll.sccj,jcll.gzxh,jcll.changetime,\n" +
                "cast(round(z1.ljzx) as string) ljzxgl,cast(round(z2.ljzx) as string) gjxhzx,cast(round(z3.ljzx) as string) zjxhzx\n" +
                "from ps\n" +
                "     left join gxzd on ps.cx = gxzd.sycx\n" +
                "     left join jcll on ps.s_train_id = jcll.s_train_id and gxzd.wzmc = jcll.wz\n" +
                "     left join zx1 z1 on ps.s_train_id = z1.s_train_id\n" +
                "     left join zx2 z2 on ps.s_train_id = z2.s_train_id\n" +
                "     left join zx3 z3 on ps.s_train_id = z3.s_train_id").createOrReplaceTempView("ps_gxzd_jcll_zx");
        System.out.println(" = = =  = ps_gxzd_jcll_zx");
        spark.sql("select * from ps_gxzd_jcll_zx order by s_train_id").show(50, false);

        // Build the "new build" overhaul-info JSON from the jcll columns. Sample:
        // {"部件状态":"新造","生产厂家":"中车株洲电力机车有限公司","检修段":"-","日期":"-","本次走行公里":"-","下车型号":"-","下车位置":"-","上车型号":"HXD1D-0001","上车位置":"1"}
        spark.sql("" +
                "select *,\n" +
                "       concat('{\"部件状态\":\"新造\",',\n" +
                // the values below come from the jcll columns
                "              '\"生产厂家\":\"', if(sccj is not null, sccj, '-'), '\",',\n" +
                "              '\"检修段\":\"-\",',\n" +
                "              '\"日期\":\"', if(changetime is not null, changetime, '-'), '\",',\n" +
                "              '\"本次走行公里\":\"-\",',\n" +
                "              '\"下车型号\":\"-\",',\n" +
                "              '\"下车位置\":\"-\",',\n" +
                "              '\"上车型号\":\"', if(s_train_id is not null, s_train_id, '-'), '\",',\n" +
                "              '\"上车位置\":\"1\"}') jcll_jxxx\n" +
                "from ps_gxzd_jcll_zx\n").createOrReplaceTempView("ps_gxzd_jcll_zx_jxxx");

        // Build the overhaul-info string assembled from repair record sheets. Sample:
        // {"部件状态":"1C5修","生产厂家":"中车株洲电机","检修段":"中车株洲电机","日期":"2018-08-13 11:08:32","本次走行公里":"1098763","下车型号":"HXD1D-0197","下车位置":"","上车型号":"HXD1D-0197","上车位置":"1"}
        MysqlUtils_3.readMysqlToTempTable(spark, "dim_jmis_t_jxgl_jxjld_jbxx");
        spark.sql("" +
                "with base_data as (\n" +
                "                    select concat(sccx, '-', scch) as scxh,\n" +
                "                           concat(xccx, '-', xcch) as xcxh,\n" +
                "                           scwz,\n" +
                "                           xcwz,\n" +
                "                           lbjbm,\n" +
                "                           concat(rt, rc) as bjzt,\n" +
                "                           zzcj as sccj,\n" +
                "                           cxdw as jxd,\n" +
                "                           create_time as rq,\n" +
                "                           bczxgl,\n" +
                "                           date(jxrq_js) as xcrq,\n" +
                "                           date(jxrq_js) as scrq\n" +
                "                    from dim_jmis_t_jxgl_jxjld_jbxx\n" +
                "                    where record_status = '0' and sccx " + cxIn + " and lbjbm " + bjpjmIn + "\n" +
                "                  ),\n" +
                "     ranked_data as (\n" +
                "                      select lbjbm, bjzt, sccj, jxd, rq, bczxgl, xcxh, xcwz, scxh, scwz,\n" +
                "                             row_number() over (partition by scxh, lbjbm,scwz,bjzt order by rq desc) as rk\n" +
                "                      from base_data\n" +
                "                    ),\n" +
                "     formatted_data as (\n" +
                "                         select scxh, lbjbm, scwz,\n" +
                "                                concat(\n" +
                "                                        '{\"部件状态\":\"', if(bjzt is not null, bjzt, '-'),\n" +
                "                                        '\",\"生产厂家\":\"', if(sccj is not null, sccj, '-'),\n" +
                "                                        '\",\"检修段\":\"', if(jxd is not null, jxd, '-'),\n" +
                "                                        '\",\"日期\":\"', if(rq is not null, rq, '-'),\n" +
                "                                        '\",\"本次走行公里\":\"', if(bczxgl is not null, bczxgl, '-'),\n" +
                "                                        '\",\"下车型号\":\"', if(xcxh is not null, xcxh, '-'),\n" +
                "                                        '\",\"下车位置\":\"', if(xcwz is not null, xcwz, '-'),\n" +
                "                                        '\",\"上车型号\":\"', if(scxh is not null, scxh, '-'),\n" +
                "                                        '\",\"上车位置\":\"', if(scwz is not null, scwz, '-'),\n" +
                "                                        '\"}') as data_jxxx\n" +
                "                         from ranked_data\n" +
                "                         where rk = 1\n" +
                "                       )\n" +
                "select scxh, lbjbm, scwz, concat_ws('&', collect_set(data_jxxx)) as jxjld_jxxx\n" +
                "from formatted_data\n" +
                "group by scxh, lbjbm, scwz").createOrReplaceTempView("jxxx2");

        // Final dim view columns:
        //s_train_id,cx,cx_id,ch,lj_id,lj,ljjc,psd_id,psd,psdjc,yyd_id,yyd,yydjc,zzc,zzrq,psrq,jcdllxmc,ljpxm,s_train_type,
        //ssxt,xtbm,xtpjm,flmc,fljc,flbm,lbjbm,pjflbm,wzmc,
        //gxmc,wz,pjdjm,pjxlh,zzrq,sccj,gzxh,changetime,
        //ljzxgl,gjxhzx,zjxhzx,
        //jcll_jxxx,jxjld_jxxx,JXXX
        // jxxx is jcll_jxxx & jxjld_jxxx merged by the UDF. Sample: {"部件状态":"新造","生产厂家":"-","检修段":"-","日期":"2017-11-17","本次走行公里":"-","下车型号":"-","下车位置":"-","上车型号":"HXD1D-0034","上车位置":"1"}&{"部件状态":"C6修","生产厂家":"株洲中车时代电气股份有限公司","检修段":"株洲中车时代电气股份有限公司","日期":"2022-12-29 09:20:03","本次走行公里":"1337059","下车型号":"HXD1D-0034","下车位置":"2","上车型号":"HXD1D-0034","上车位置":"2"}
        spark.sql("" +
                "select distinct a1.*,a2.jxjld_jxxx,jxxxordergroupudf(a1.jcll_jxxx, a2.jxjld_jxxx) as jxxx " +
                "from ps_gxzd_jcll_zx_jxxx a1 left join jxxx2 a2 on a1.s_train_id = a2.scxh and a1.lbjbm = a2.lbjbm and a1.wz = a2.scwz").createOrReplaceTempView("ps_gxzz_jcll_zx_jxxx_all_dim");

        System.out.println("====== ps_gxzz_jcll_zx_jxxx_all_dim");
        spark.sql("select * from ps_gxzz_jcll_zx_jxxx_all_dim order by s_train_id").show(80, false);
        // TODO 2023-10-12: cache the wide table; all later steps operate on this cached trainAttributesDim.
        // Equivalent SQL form would be: CACHE TABLE <name>, followed by an action to materialize it.
        // spark.sql("CACHE TABLE ps_gxzz_jcll_zx_jxxx_all_dim");
        sqlContext.cacheTable("ps_gxzz_jcll_zx_jxxx_all_dim");
        // BUG FIX: spark.sql(...) alone only builds a lazy query plan — no action ran, so the
        // cache was never actually materialized. Invoke count() to execute an action and fill the cache.
        spark.sql("select count(1) from ps_gxzz_jcll_zx_jxxx_all_dim").count();
        // spark.sql("UNCACHE TABLE ps_gxzz_jcll_zx_jxxx_all_dim"); // release the cached table data
    }


    /** Placeholder written into columns that carry no meaningful value. */
    private static final String DEFAULT_VALUE = "-";
    /** Diagnosis-category label ("model diagnosis info") stamped onto every TCMS-derived record. */
    private static final String DIAGNOSIS_TYPE = "模型诊断信息";


    /**
     * Assembles the shared WHERE-clause fragment used by the real-time queries:
     * {@code "cx <cxFilterSql> and bjpjm <bjpgmFilterSql> <otherFilterSql>"}.
     *
     * @param cxFilterSql    locomotive-type predicate, e.g. "in('HXD1D','HXD2')"
     * @param bjpgmFilterSql component-PJM predicate, e.g. "in ('PJ00033','PJ00177')"
     * @param otherFilterSql extra clause appended verbatim after a single space
     * @return the concatenated filter condition
     */
    private static String buildFilterCondition(String cxFilterSql, String bjpgmFilterSql, String otherFilterSql) {
        StringBuilder condition = new StringBuilder("cx ");
        condition.append(cxFilterSql)
                .append(" and bjpjm ")
                .append(bjpgmFilterSql)
                .append(' ')
                .append(otherFilterSql);
        return condition.toString();
    }


    /**
     * Real-time processing: joins the TCMS stream with AEI status and the model-detail
     * table (t_phm_mxxq) to build wide table #2. Call this at most once per foreachRDD()
     * batch — a single call keeps the AEI in/out-of-depot reads consistent and avoids
     * parallel table reads yielding inaccurate data.
     *
     * @param spark          active SparkSession
     * @param stringJavaRDD  JavaRDD formed by consuming TCMS real-time data from Kafka
     * @param cxFilterSql    locomotive-type filter fragment, e.g. in('HXD1D','HXD2')
     * @param bjpgmFilterSql component-PJM filter fragment, e.g. in ('PJ00033','PJ00177')
     * @param otherFilterSql extra filter clause, e.g. and phm_lb != '-' and phm_lb is not null and phm_lb != ''
     * @return the joined wide dataset (tcms + aei + dim + mxxq), cached and materialized via count()
     * @throws IOException propagated from the MySQL helper utilities
     */
    public static Dataset<Row> processRealTimeData(SparkSession spark, JavaRDD<String> stringJavaRDD, String cxFilterSql, String bjpgmFilterSql, String otherFilterSql) throws IOException {
        String filterCondition = buildFilterCondition(cxFilterSql, bjpgmFilterSql, otherFilterSql);

        // Fields carried by the TCMS real-time records:
        //cx,ch,s_train_id,ssxt,xtbm,xtpjm,ssbj,bjbm,bjpjm,phm_bjwz,AB,phm_gzm,phm_lb,phm_mc,phm_zd,gzkssj_up,idx,gzkssj,
        // lj_id,lj,ljjc,psd_id,psd,psdjc,yyd_id,yyd,yydjc,zzc,zzrq,psrq,jcdllxmc,ljpxm,jxrq,rc,rt,
        // cmd_gzm,gzdm,gzdmshow,gzmc,source,zj_ms,zj_fa,cxdmc,ljzxgl_tcms,rjzx_tcms,xhzxgl_tcms,yyzl,zdlb,gzm
        spark.read().json(stringJavaRDD)
                .filter(expr(filterCondition))
                .selectExpr("cx", "ch", "s_train_id", "ssxt", "xtbm", "xtpjm", "ssbj", "bjbm", "bjpjm", "phm_bjwz", "AB",
                        "regexp_replace(phm_gzm, ' +', '') as phm_gzm_tcms", "phm_lb", "phm_mc", "phm_zd", "gzkssj_up", "idx", "gzkssj",
                        "lj_id", "lj", "ljjc", "psd_id", "psd", "psdjc", "yyd_id", "yyd", "yydjc", "zzc", "zzrq", "psrq", "jcdllxmc", "ljpxm", "jxrq", "rc", "rt",
                        "cmd_gzm", "gzdm", "gzdmshow", "gzmc", "source", "zj_ms", "zj_fa", "cxdmc",
                        "ljzxgl as ljzxgl_tcms", "rjzx as rjzx_tcms", "xhzxgl as xhzxgl_tcms")
                .withColumn("yyzl", lit(DEFAULT_VALUE))
                .withColumn("zdlb", lit(DIAGNOSIS_TYPE))
                .withColumn("gzm", col("phm_gzm_tcms"))
                .createOrReplaceTempView("tcmsEvent");
        System.out.println("==== tcmsEvent 实时数据");
        spark.sql("select * from tcmsEvent").show(20, false);

        // TODO 2023-10-12: reads locomotive status from the HBase (Phoenix) aei table; drop this call when no longer needed.
        // NOTE(review): the type filter here is hard-coded to in ('HXD2') instead of using cxFilterSql — confirm this is intended.
        MysqlUtils_3.loadTrainStatusFromPhoenix(spark, "in ('HXD2')", "aei_run");

        // Generate AEI test data from the local MySQL train_status table . . .
        // MysqlUtils_3.readMysqlToTempTable(spark, "train_status");
        // spark.sql("select s_train_id, jccx as loco_type, ch as loco_no, train_status_time as cksj, train_status " +
        //         "from train_status where train_status = 0 and jccx = 'HXD2'").createOrReplaceTempView("aei_run");

        // Read the model-detail dictionary table.
        MysqlUtils_3.readMysqlToTempTable(spark, "t_phm_mxxq");
        spark.sql("" +
                "select mx_id,syjx,mxmc,bjwz,mxjg,bjpjm,jgms,xxms,czjy,zbjy,jxjy,yfyy,regexp_replace(phm_gzm, ' +', '') phm_gzm\n" +
                "from t_phm_mxxq\n" +
                "where syjx " + cxFilterSql + " and bjpjm " + bjpgmFilterSql + "").createOrReplaceTempView("mxxq");

        // Join the faults of locomotives currently on the line with their base info and
        // overhaul info to obtain every downstream field; this guards against consuming
        // faults reported while a locomotive is still being prepared in the depot.
        /*
            cx_1,ch_1,s_train_id_1,ssxt_1,xtbm_1,xtpjm_1,ssbj,bjbm,bjpjm_1,phm_bjwz,AB,phm_gzm_tcms,phm_lb,phm_mc,phm_zd,gzkssj_up,idx,gzkssj,
            lj_id_1,lj_1,ljjc_1,psd_id_1,psd_1,psdjc_1,yyd_id_1,yyd_1,yydjc_1,zzc_1,zzrq_1,psrq_1,jcdllxmc_1,ljpxm_1,jxrq,rc,rt,cmd_gzm,gzdm,gzdmshow,gzmc,source,zj_ms,zj_fa,cxdmc,ljzxgl_tcms,rjzx_tcms,xhzxgl_tcms,yyzl,zdlb,gzm,

           loco_type,loco_no,cksj,train_status

            s_train_id_3,cx_3,ch_3,cx_id,lj_id_3,lj_3,ljjc_3,psd_id_3,psd_3,psdjc_3,yyd_id_3,yyd_3,yydjc_3,zzc_3,zzrq_3,psrq_3,jcdllxmc_3,ljpxm_3,s_train_type,
            ssxt_3,xtbm_3,xtpjm_3,flmc,fljc,flbm,lbjbm,pjflbm,wzmc,
            gxmc,wz,pjdjm,pjxlh,zzrq_4,sccj,gzxh,changetime,ljzxgl,gjxhzx,zjxhzx,jcll_jxxx,jxjld_jxxx,jxxx,
            mx_id,syjx,mxmc,mxjg,bjpjm_6,jgms,xxms,czjy,zbjy,jxjy,yfyy,phm_gzm
         */
        Dataset<Row> tcms_jcll_mxxq_DS = spark.sql("select " +
                        // fields from the TCMS real-time fault stream
                        "t1.cx cx_1,t1.ch ch_1,t1.s_train_id s_train_id_1,t1.ssxt ssxt_1,t1.xtbm xtbm_1,t1.xtpjm xtpjm_1,ssbj,bjbm,t1.bjpjm bjpjm_1,phm_bjwz,AB,\n" +
                        "phm_gzm_tcms,phm_lb,phm_mc,phm_zd,gzkssj_up,idx,gzkssj,t1.lj_id lj_id_1,t1.lj lj_1,t1.ljjc ljjc_1,t1.psd_id psd_id_1,t1.psd psd_1,t1.psdjc psdjc_1,t1.yyd_id yyd_id_1,t1.yyd yyd_1,t1.yydjc yydjc_1," +
                        "t1.zzc zzc_1,t1.zzrq zzrq_1,t1.psrq psrq_1,t1.jcdllxmc jcdllxmc_1,t1.ljpxm ljpxm_1,jxrq,rc,rt,\n" +
                        "cmd_gzm,gzdm,gzdmshow,gzmc,source,zj_ms,zj_fa,cxdmc,ljzxgl_tcms,rjzx_tcms,xhzxgl_tcms,yyzl,zdlb,gzm,\n" +
                        // fields from AEI
                        "loco_type,loco_no,cksj,train_status,\n" +
                        // fields from the ps view
                        "t3.s_train_id s_train_id_3,t3.cx cx_3,t3.cx_id as cx_id,t3.ch ch_3,t3.lj_id lj_id_3,t3.lj lj_3,t3.ljjc ljjc_3,t3.psd_id psd_id_3,t3.psd psd_3,t3.psdjc psdjc_3,t3.yyd_id yyd_id_3,t3.yyd yyd_3,t3.yydjc yydjc_3," +
                        "t3.zzc zzc_3,t3.zzrq_ps zzrq_3,t3.psrq psrq_3,t3.jcdllxmc jcdllxmc_3,t3.ljpxm ljpxm_3,s_train_type,\n" +
                        // fields from the configuration dictionary
                        "t3.ssxt ssxt_3,t3.xtbm xtbm_3,t3.xtpjm xtpjm_3,flmc,fljc,flbm,lbjbm,pjflbm,wzmc,\n" +
                        // fields from the jcll_1 service-history table
                        "gxmc,wz,pjdjm,pjxlh,t3.zzrq_jcll zzrq_4,sccj,gzxh,changetime,ljzxgl,gjxhzx,zjxhzx,jcll_jxxx,jxjld_jxxx,jxxx,\n" +
                        // fields from the model-detail table
                        "mx_id,syjx,mxmc,mxjg,t4.bjpjm bjpjm_6,jgms,xxms,czjy,zbjy,jxjy,yfyy,phm_gzm " +
                        "from tcmsEvent t1  " +
                        "join aei_run t2 on t1.s_train_id = concat(t2.loco_type, '-', t2.loco_no) " +
                        "join ps_gxzz_jcll_zx_jxxx_all_dim t3 on t1.s_train_id = t3.S_TRAIN_ID and t1.phm_bjwz = t3.WZ and t1.BJPJM = t3.PJFLBM  " +
                        "join mxxq t4 on t1.gzm = t4.phm_gzm and t1.cx = t4.syjx and t1.BJPJM = t4.bjpjm and t1.phm_bjwz = t4.bjwz")
                // .dropDuplicates()
                // Keep records whose fault start time is later than this locomotive's AEI departure time.
                // NOTE(review): the original note called for gzkssj >= departure time (and departure > entry);
                // the code uses a strict '>' on gzkssj only — confirm which is intended.
                .filter(expr("gzkssj > cksj"));

        // TODO 2023-10-12: the joined dataset is small but reused several times below, so cache it here;
        // count() is the action that materializes the cache.
        tcms_jcll_mxxq_DS.cache();
        tcms_jcll_mxxq_DS.count();
        System.out.println("= = = = tcms_jcll_mxxq_DS");
        tcms_jcll_mxxq_DS.show(60, false);
        return tcms_jcll_mxxq_DS;
    }


    /**
     * 从获取到的所有车的基本信息: ps + gxzd + jcll_1 + t_phm_mxxq+zx1,2,3 关联 tcms实时数据 + aei + t_phm_mxxq 形成的最终宽表 中获取 入 `ads_phm_fault`以及入`t_phm_sqwz`表中的数据.
     * 根据 `ads_phm_fault`表中的当前趟数据进行健康评估结果.并将健康评估结果进行返回.这个返回的健康评估结果还需要关联所有车的信息入 `parts_label`.
     *
     * @param spark
     * @param tcms_jcll_mxxq_DS 最终关联形成的宽表
     * @throws IOException
     */
    public static void commonAdsAndSqwxAndEvaluate(SparkSession spark, Dataset<Row> tcms_jcll_mxxq_DS, String cxFilterSql, String bjpgmFilterSql) throws IOException {
        /*
            TCMS kafka中的字段
            cx_1,ch_1,s_train_id_1,ssxt_1,xtbm_1,xtpjm_1,ssbj,bjbm,bjpjm_1,phm_bjwz,AB,phm_gzm_tcms,phm_lb,phm_mc,phm_zd,gzkssj_up,idx,gzkssj,
            lj_id_1,lj_1,ljjc_1,psd_id_1,psd_1,psdjc_1,yyd_id_1,yyd_1,yydjc_1,zzc_1,zzrq_1,psrq_1,jcdllxmc_1,ljpxm_1,jxrq,rc,rt,cmd_gzm,gzdm,gzdmshow,gzmc,source,zj_ms,zj_fa,
            cxdmc,ljzxgl_tcms,rjzx_tcms,xhzxgl_tcms,yyzl,zdlb,gzm,
            aei处理后的字段
            loco_type,loco_no,cksj,train_status
            ps相关字段
            s_train_id_3,cx_3,ch_3,cx_id,lj_id_3,lj_3,ljjc_3,psd_id_3,psd_3,psdjc_3,yyd_id_3,yyd_3,yydjc_3,zzc_3,zzrq_3,psrq_3,jcdllxmc_3,ljpxm_3,s_train_type,
            构型字典+mxxq表中系统以及部件内容
            ssxt_3,xtbm_3,xtpjm_3,flmc,fljc,flbm,lbjbm,pjflbm,wzmc,
            jcll_1处理后的字段
            gxmc,wz,pjdjm,pjxlh,zzrq_4,sccj,gzxh,changetime,ljzxgl,gjxhzx,zjxhzx,jcll_jxxx,jxjld_jxxx,jxxx,
            t_phm_mxxq表中所有的字段
            mx_id,syjx,mxmc,mxjg,bjpjm_6,jgms,xxms,czjy,zbjy,jxjy,yfyy,phm_gzm
         */
        Dataset<Row> adsPhmFaultDS = tcms_jcll_mxxq_DS.selectExpr(
                        "s_train_id_1 as s_train_id",
                        "idx AS s_fault_id",
                        "concat_ws('-',train_status,s_train_id_1, phm_bjwz, mx_id, date_format(cksj, 'yyyyMMddHHmmss')) AS mx_id",
                        "s_train_id_1 AS s_train_name",
                        "cx_1 AS s_train_type_code",
                        "source AS s_fault_source",
                        "gzkssj_up AS s_fault_time",
                        // 获取所属系统系统、系统编码、系统PJM、所属部件、部件编码、部件PJM.这里获取的逻辑: 如果当前实时数据传过来的字段没有值,则获取 gxzd + t_phm_mxxq 表中的字段值.
                        "case when ssxt_1 <> '' and ssxt_1 is not null then ssxt_1 else ssxt_3 end as s_fault_sys", // 所属系统 ssxt_1,ssxt_3
                        "case when xtbm_1 <> '' and xtbm_1 is not null then xtbm_1 else xtbm_3 end AS s_fault_sys_code1", // 系统编码 xtbm_1,xtbm_3
                        "case when xtpjm_1 <> '' and xtpjm_1 is not null then xtpjm_1 else xtpjm_3 end  AS s_fault_sys_code2", // 系统PJM xtpjm_1,xtpjm_3
                        "case when ssbj <> '' and ssbj is not null then ssbj else fljc end as s_fault_bw", // 所属部件 ssbj,fljc
                        "case when bjbm <> '' and bjbm is not null then bjbm else flbm end as s_fault_bw_code1", // 部件编码 bjbm,flbm
                        "case when bjpjm_1 <> '' and bjpjm_1 is not null then bjpjm_1 else lbjbm end as s_fault_bw_code2", // 部件PJM  bjpjm_1, lbjbm
                        "gzm AS s_fault_code",
                        "gzmc AS s_fault_name",
                        "phm_lb",
                        "mxjg",
                        "xxms AS phm_gzmc",
                        "phm_bjwz",
                        "zj_ms",
                        "zj_fa",
                        "pjxlh",
                        "pjdjm",
                        "1 AS s_htzt",
                        "0 AS s_hkzt",
                        "1 AS s_xfzt"
                )
                // TODO 2023-10-12 ads_phm_fault 表中 添加入库时间字段
                .withColumn("create_time", current_timestamp())
                .dropDuplicates();
        System.out.println("= = = = adsPhmFaultDS");
        adsPhmFaultDS.show(40, false);
        // 测试通过,入 ads_phm_fault 表中的数据没有问题.
        MysqlUtils_3.writeDataToMysqlTable(adsPhmFaultDS, "ads_phm_fault", SaveMode.Append);

        // 拉取当前批次中入 t_phm_sqwx 表中的字段. 这里需要进行聚合操作
        Dataset<Row> sqwxLocalBatchDS = tcms_jcll_mxxq_DS
                .selectExpr("concat_ws('-',train_status, s_train_id_1, phm_bjwz, mx_id, date_format(cksj, 'yyyyMMddHHmmss')) AS ID", "s_train_id_1 s_train_id",
                        "cx_1 as cx",
                        "ch_1 as ch",
                        // TODO 2023-10-12 事情维修表中添加 cx_id 这个字段.
                        "cx_id",
                        "case when lj_1 <> '' and lj_1 is not null then lj_1 else lj_3 end as lj",
                        "case when ljjc_1 <> '' and ljjc_1 is not null then ljjc_1 else ljjc_3 end as ljjc",
                        "case when lj_id_1 <> '' and lj_id_1 is not null then lj_id_1 else lj_id_3 end as ljm",
                        "case when psd_1 <> '' and psd_1 is not null then psd_1 else psd_3 end as psd",
                        "case when psdjc_1 <> '' and psdjc_1 is not null then psdjc_1 else psdjc_3 end as psdjc",
                        "case when psd_id_1 <> '' and psd_id_1 is not null then psd_id_1 else psd_id_3 end as psddm",
                        "case when ssxt_1 <> '' and ssxt_1 is not null then ssxt_1 else ssxt_3 end as ssxt", // 所属系统 ssxt_1,ssxt_3
                        "case when xtpjm_1 <> '' and xtpjm_1 is not null then xtpjm_1 else xtpjm_3 end  AS xtpjm", // 系统PJM xtpjm_1,xtpjm_3
                        "case when ssbj <> '' and ssbj is not null then ssbj else fljc end as ssbj", // 所属部件 ssbj,fljc
                        "case when bjpjm_1 <> '' and bjpjm_1 is not null then bjpjm_1 else lbjbm end as bjpjm", // 部件PJM  bjpjm_1, lbjbm
                        "phm_bjwz AS bjwz",
                        "pjxlh AS bjxlh",
                        "pjdjm AS bjdjm",
                        "mxjg AS mx",
                        // 2023-05-28 修改 表事情维修表中 mxmc 这个字段的取值,由以前的取模型详情表中的 jgms 这个字段,修改为:MXMC 这个字典的值.
                        //"jgms AS mxmc",
                        "mxmc",
                        "xxms AS mxms",
                        "current_timestamp AS create_time",
                        "czjy",
                        "1 AS xfzt",
                        "1 AS htzt",
                        "0 AS hkzt",
                        "'1' AS status",
                        "jcdllxmc_1 as dllx",
                        "rc",
                        "'-' AS yyzl",
                        "zzc_1 AS sccj",
                        "zzrq_1 zzrq",
                        "ljzxgl_tcms AS ljzx",
                        "gzm",
                        "SOURCE AS gzly",
                        "zdlb",
                        // 这里的发生时间,上次最后代码ch要求修改过,取的时间是 故障发生的最小时间.
                        "gzkssj_up as fssj",
                        "yfyy",
                        "zbjy AS hkzbjy",
                        "JXJY AS hkjxjy"
                );
        System.out.println("========== sqwxLocalBatchDS");
        sqwxLocalBatchDS.show(40, false);
        //id,s_train_id,cx,ch,lj,ljjc,ljm,psd,psdjc,psddm,ssxt,xtpjm,ssbj,bjpjm,bjwz,bjxlh,bjdjm,
        //mx,mxmc,mxms,create_time,czjy,xfzt,htzt,hkzt,status,dllx,rc,yyzl,sccj,zzrq,ljzx,gzm,gzly,zdlb,fssj,yfyy,hkzbjy,hkjxjy
        // 获取当前趟中 t_phm_sqwx 历史数据,和上面的当前批次中的数据进行 union,再进行关键字段的聚合,最终将当前执行结果更新写操作入表.
        MysqlUtils_3.readMysqlToTempTable(spark, "t_phm_sqwx");
        Dataset<Row> sqwxHistoryDS = spark.sql("select id, s_train_id, cx, ch, cx_id,lj, ljjc, ljm, psd, psdjc, psddm, ssxt, xtpjm, ssbj, bjpjm, bjwz, bjxlh, bjdjm,\n" +
                "       mx, mxmc, mxms, create_time, czjy, xfzt, htzt, hkzt, status, dllx, rc, yyzl, sccj, zzrq, ljzx, gzm, gzly, zdlb,\n" +
                "       fssj, yfyy, hkzbjy, hkjxjy\n" +
                "from t_phm_sqwx where hkzt = '0' and bjpjm " + bjpgmFilterSql + " and cx " + cxFilterSql + " ");
        System.out.println("= = = = = sqwxHistoryDS");
        sqwxHistoryDS.show(40, false);
        Dataset<Row> sqwxUpdataDS = sqwxLocalBatchDS.union(sqwxHistoryDS)
                .groupBy("id", "s_train_id", "cx", "ch", "cx_id", "lj", "ljjc", "ljm", "psd", "psdjc", "psddm", "ssxt", "xtpjm", "ssbj", "bjpjm", "bjwz", "bjxlh", "bjdjm", "xfzt", "htzt", "hkzt",
                        "status", "dllx", "rc", "yyzl", "sccj", "zzrq", "ljzx", "gzly", "zdlb")
                .agg(
                        concat_ws(",", collect_set(col("mx"))).alias("mx"),
                        concat_ws(",", collect_set(col("mxmc"))).alias("mxmc"),
                        concat_ws(",", collect_set(col("mxms"))).alias("mxms"),
                        concat_ws(",", collect_set(col("czjy"))).alias("czjy"),
                        concat_ws(",", collect_set(col("yfyy"))).alias("yfyy"),
                        concat_ws(",", collect_set(col("gzm"))).alias("gzm"),
                        concat_ws(",", collect_set(col("hkzbjy"))).alias("hkzbjy"),
                        concat_ws(",", collect_set(col("hkjxjy"))).alias("hkjxjy"),
                        min("fssj").alias("fssj")
                )
                .withColumn("create_time", current_timestamp())
                .withColumn("mx", expr("distinctString(mx)"))
                .withColumn("mxmc", expr("distinctString(mxmc)"))
                .withColumn("mxms", expr("distinctString(mxms)"))
                .withColumn("czjy", expr("distinctString(czjy)"))
                .withColumn("yfyy", expr("distinctString(yfyy)"))
                .withColumn("gzm", expr("distinctString(gzm)"))
                .withColumn("hkzbjy", expr("distinctString(hkzbjy)"))
                .withColumn("hkjxjy", expr("distinctString(hkjxjy)"));
        System.out.println(" = = = =  = sqwxUpdataDS");
        sqwxUpdataDS.show(40, false);
        // 写入 t_phm_sqwx 中数据
        MysqlUtils_3.upsertDatasetToMySQL(sqwxUpdataDS, "t_phm_sqwx");


        // 进行健康评估的处理: 这里拿牵引变流器中模型一:牵引变流器接触器卡合故障视情维修模型 这里来进行举例说明,HXD1C中牵引变流器中有两个位置,这里要对每个位置每个模型进行健康评估.
        // 如果是 牵引变流器接触器卡合故障视情维修模型的健康评估这里就是两为位置上的评估结果.
        // TODO 当前批次中模型报的故障信息,这里已经在上面进行了入库操作,这里不做处理,后续如果需要最终 union 统一入库,则这里打开
        // Dataset<Row> tcms_jkpgDs = tcms_jcll_mxxq_DS.selectExpr("s_train_id_1 as s_train_id", "bjpjm_1 as bjpjm", "phm_bjwz", "mx_id", "MXJG", "xxms");


        // Expose the MySQL table `ads_phm_fault` as a Spark temp view for the query below.
        MysqlUtils_3.readMysqlToTempTable(spark, "ads_phm_fault");

        // Pull the current trip's historical records from `ads_phm_fault` for model-granularity
        // health evaluation. For HXD2 locomotives the axle information must first be extracted
        // from the fault description (xxms). Sample of the processed result:
        /*
            +----------+-------+--------+-----+---------+-------------+------+---+
            |s_train_id|bjpjm  |phm_bjwz|mx_id|mxjg     |xxms         |zxx_wz|zxx|
            +----------+-------+--------+-----+---------+-------------+------+---+
            |HXD2-0001 |PJ00003|1       |60001|接触器卡合故障  |四象限1充电接触器卡合故障|1     |1轴 |
            |HXD2-0002 |PJ00003|1       |60016|主回路接地故障报警|1轴 逆变器输出接地   |1     |1轴 |
            |HXD2-0002 |PJ00003|4       |60016|主回路接地故障报警|4轴 逆变器输出接地   |4     |4轴 |
            |HXD2-0001 |PJ00003|1       |60004|水压异常故障   |四象限1水压异常     |1     |1轴 |
            |HXD2-0001 |PJ00003|3       |60014|网压欠压报警   |四象限1网压欠压     |1     |1轴 |
            |HXD2-0001 |PJ00003|1       |60001|接触器卡合故障  |四象限2充电接触器卡合故障|2     |2轴 |
            |HXD2-0002 |PJ00003|2       |60016|主回路接地故障报警|4轴 逆变器输出接地   |4     |4轴 |
            |HXD2-0001 |PJ00003|1       |60016|主回路接地故障报警|1轴 逆变器输出接地   |1     |1轴 |
         */
        // NOTE(review): `bjpgmFilterSql` and `cxFilterSql` are built earlier in this method
        // (outside this excerpt) and are concatenated directly into the SQL text — presumably
        // trusted, internally generated "in (...)" fragments; confirm they never carry user input.
        Dataset<Row> ads_history_jkpgDs = spark.sql("select s_train_id, s_fault_bw_code2 as bjpjm, phm_bjwz, substring_index(substring_index(mx_id, '-', 5), '-', -1) as mx_id, mxjg,phm_gzmc as xxms\n" +
                        "from ads_phm_fault\n" +
                        "where s_hkzt = '0' and s_fault_bw_code2 " + bjpgmFilterSql + " and s_train_type_code " + cxFilterSql + " ")
                // Model 60003 is excluded from the evaluation (reason not visible here — TODO confirm).
                .filter(col("mx_id").notEqual("60003"))
                // The first digit appearing in the fault description is taken as the raw axle position.
                .withColumn("zxx_wz", expr("regexp_extract(xxms, '([0-9])', 1)"))
                // Map (component position phm_bjwz, raw digit zxx_wz) to an axle label '1轴'..'4轴';
                // any combination not listed falls through to the empty string.
                .withColumn("zxx", expr("case\n" +
                        "    when phm_bjwz = '1' and  zxx_wz= '1' then '1轴'\n" +
                        "    when phm_bjwz = '1' and zxx_wz = '2' then '2轴'\n" +
                        "    when phm_bjwz = '2' and zxx_wz = '3' then '3轴'\n" +
                        "    when phm_bjwz = '2' and zxx_wz = '4' then '4轴'\n" +
                        "    when phm_bjwz = '3' and zxx_wz = '1' then '1轴'\n" +
                        "    when phm_bjwz = '3' and zxx_wz = '2' then '2轴'\n" +
                        "    when phm_bjwz = '4' and zxx_wz = '3' then '3轴'\n" +
                        "    when phm_bjwz = '4' and zxx_wz = '4' then '4轴'\n" +
                        "    else '' end\n"));
        System.out.println(" = = = = ads_history_jkpgDs");
        ads_history_jkpgDs.show(50, false);

        // Output columns: S_TRAIN_ID,BJPJM,BJWZ,MXMC,ZXX,jgms
        // Compute a health grade per model per position. E.g. for the contactor-jam model the
        // aggregation can yield up to 4 rows (positions 1-4); for each position, if the collected
        // faults span more than one axle the grade is D, otherwise C. Sample result:
        // = = = = = = = =  = jkpg_1 执行结束
        // +----------+----+-------+--------+-----+---------+---------------------------+-----+----+---------+----+--------------------------------------------------------+---------------------------------------------------------------------------------------------+
        //|s_train_id|cx  |bjpjm  |phm_bjwz|mx_id|mxjg     |xxms                       |zxx  |pgjg|zhpj_ztzl|mxzy|pgcszb                                                  |pgcszb_old                                                                                   |
        //+----------+----+-------+--------+-----+---------+---------------------------+-----+----+---------+----+--------------------------------------------------------+---------------------------------------------------------------------------------------------+
        //|HXD2-0001 |HXD2|PJ00003|1       |60001|接触器卡合故障  |四象限2充电接触器卡合故障,四象限1充电接触器卡合故障|2轴,1轴|D   |D        |故障  |{"模型ID":60001,"模型简称":"接触器卡合故障","模型结果":"D","模型转义":"故障"}  |{"模型ID":60001,"模型简称":"接触器卡合故障","模型结果":"D","模型结果描述":"四象限2充电接触器卡合故障,四象限1充电接触器卡合故障","模型转义":"故障"}|
        //|HXD2-0001 |HXD2|PJ00003|2       |60001|接触器卡合故障  |四象限3充电接触器卡合故障              |3轴   |C   |C        |异常  |{"模型ID":60001,"模型简称":"接触器卡合故障","模型结果":"C","模型转义":"异常"}  |{"模型ID":60001,"模型简称":"接触器卡合故障","模型结果":"C","模型结果描述":"四象限3充电接触器卡合故障","模型转义":"异常"}              |
        //|HXD2-0001 |HXD2|PJ00003|1       |60004|水压异常故障   |四象限1水压异常                   |1轴   |C   |C        |异常  |{"模型ID":60004,"模型简称":"水压异常故障","模型结果":"C","模型转义":"异常"}   |{"模型ID":60004,"模型简称":"水压异常故障","模型结果":"C","模型结果描述":"四象限1水压异常","模型转义":"异常"}                    |
        //|HXD2-0001 |HXD2|PJ00003|2       |60010|原边过流报警故障 |四象限3变压器原边过流                |3轴   |C   |C        |异常  |{"模型ID":60010,"模型简称":"原边过流报警故障","模型结果":"C","模型转义":"异常"} |{"模型ID":60010,"模型简称":"原边过流报警故障","模型结果":"C","模型结果描述":"四象限3变压器原边过流","模型转义":"异常"}               |
        //|HXD2-0001 |HXD2|PJ00003|2       |60011|四象限输入过流报警|四象限4输入过流                   |4轴   |C   |C        |异常  |{"模型ID":60011,"模型简称":"四象限输入过流报警","模型结果":"C","模型转义":"异常"}|{"模型ID":60011,"模型简称":"四象限输入过流报警","模型结果":"C","模型结果描述":"四象限4输入过流","模型转义":"异常"}                 |
        Dataset<Row> jkpgDs = ads_history_jkpgDs
                .groupBy("s_train_id", "bjpjm", "phm_bjwz", "mx_id", "MXJG")
                .agg(
                        collect_set(col("xxms")).as("xxms"),
                        concat_ws(",", collect_set(col("zxx"))).as("zxx")
                )
                // The aggregated axle info is comma separated: splitting it into an array of
                // length 1 means a single axle (grade C); length 2 means the fault crosses axles (grade D).
                .withColumn("cx", split(col("s_train_id"), "-").getItem(0))
                .withColumn("pgjg", when(size(split(col("zxx"), ",")).equalTo(2), lit("D")).otherwise(lit("C")))
                .withColumn("ZHPJ_ZTZL", when(col("pgjg").equalTo("D"), lit("D")).otherwise("C"))
                .withColumn("xxms", concat_ws(",", col("xxms")))
                .withColumn("mxzy", when(col("pgjg").equalTo("D"), "故障").otherwise("异常"))
                // JSON samples for the two evaluation-parameter columns built below:
                // {"模型ID":60001,"模型简称":"接触器卡合故障","模型结果":"D","模型结果描述":"四象限2充电接触器卡合故障,四象限1充电接触器卡合故障","模型转义":"故障"}
                // {"模型ID":60001,"模型简称":"接触器卡合故障","模型结果":"D","模型转义":"故障"}
                .withColumn("PGCSZB", expr("CONCAT('{\"模型ID\":', mx_id, ',\"模型简称\":\"', mxjg, '\",\"模型结果\":\"', pgjg,'\",\"模型转义\":\"', mxzy, '\"}')"))
                // Columns at this point: s_train_id,cx,bjpjm,phm_bjwz,mx_id,MXJG,xxms,zxx,pgjg,ZHPJ_ZTZL,mxzy,PGCSZB
                .withColumn("pgcszb_old", expr("CONCAT('{\"模型ID\":', mx_id, ',\"模型简称\":\"', mxjg, '\",\"模型结果\":\"', pgjg, '\",\"模型结果描述\":\"', xxms,'\",\"模型转义\":\"', mxzy, '\"}')"))
                .selectExpr("s_train_id", "cx", "bjpjm", "phm_bjwz", "mx_id", "mxjg", "xxms", "zxx", "pgjg", "zhpj_ztzl", "mxzy", "pgcszb", "pgcszb_old")
                .orderBy("s_train_id", "mx_id", "phm_bjwz");
        System.out.println("====== jkpgDs |  ads_phm_fault 表中历史数据健康评估结果 = = = = =");
        jkpgDs.show(60, false);
        jkpgDs.selectExpr("s_train_id", "cx", "phm_bjwz", "bjpjm", "mx_id", "mxjg", "pgjg", "mxzy").createOrReplaceTempView("jkpg_1");

        // Release the cached wide table built from the pulled real-time data.
        // NOTE(review): `tcms_jcll_mxxq_DS` is created earlier in this method, outside this excerpt.
        tcms_jcll_mxxq_DS.unpersist();
    }

    /***
     * Fetches the health-evaluation results and assembles them into the final parts label. The
     * input here is the finest-granularity data from the `ads_phm_fault` table.
     * The long table must first be padded out: locomotives that reported faults keep the model
     * and health-evaluation result of each fault, while models with no fault are filled in for
     * every locomotive so that the table is complete. Taking HXD2 as an example, every
     * locomotive must end up with rows for all 25 models.
     */
    public static void writePartsLabel(SparkSession spark, SQLContext sqlContext, String cxIn, String bjpjmIn) throws IOException {

        // Locomotives that have health-evaluation data: either a single model's result, or the
        // aggregated result when one locomotive has evaluation data for several models.
        // NOTE(review): cxIn / bjpjmIn are concatenated straight into the SQL text below. They are
        // presumably trusted, internally generated fragments such as "in ('HXD2')" — confirm they
        // can never carry user input, otherwise this is SQL-injectable.
        MysqlUtils_3.readMysqlToTempTable(spark, "t_phm_mxxq");
        System.out.println("==== jkpg_1 = = = = = = ");
        spark.sql("select * from jkpg_1").show(60, false);
        spark.sql("" +
                        "with t1 as (select distinct s_train_id from jkpg_1 where cx " + cxIn + ")," +
                        "t2 as (" +
                        "   select distinct syjx,bjwz,bjpjm, mx_id, concat('{\"模型ID\":', mx_id, ',\"模型简称\":\"', mxjg,'\",\"模型结果\":\"A\",\"模型转义\":\"正常\"}') as pgcszb " +
                        "   from t_phm_mxxq where syjx " + cxIn + " and bjpjm " + bjpjmIn + ")," +
                        "t3 as (" +
                        "   select s_train_id, cx,phm_bjwz, bjpjm, mx_id,pgjg, concat('{\"模型ID\":', mx_id, ',\"模型简称\":\"', mxjg, '\",\"模型结果\":\"', pgjg, '\",\"模型转义\":\"', mxzy,'\"}') as pgcszb_2 " +
                        "   from jkpg_1 where cx " + cxIn + " and bjpjm " + bjpjmIn + ")," +
                        "t4 as (" +
                        // Left-join the model-detail table against `ads_phm_fault` (t3). A match means a
                        // real-time TCMS fault maps to that model, so its evaluation result/parameters are
                        // used; no match means the model has no fault at that position, so the result is
                        // the "A"/normal JSON prepared in t2.
                        "   select t1.s_train_id, t2.syjx as cx,t2.bjwz, t2.bjpjm,if(t3.mx_id is not null,t3.pgjg,'A') as pgjg_res,if(t3.mx_id is not null,pgcszb_2,pgcszb) as pgcszb " +
                        "   from t1 cross join t2 " +
                        "   left join t3 on t1.s_train_id = t3.s_train_id and t2.syjx = t3.cx and t2.bjpjm = t3.bjpjm and t2.bjwz = t3.phm_bjwz and t2.mx_id = t3.mx_id " +
                        "   order by t1.s_train_id,t2.bjpjm,t2.mx_id)" +
                        "select s_train_id,cx,bjwz,bjpjm,concat_ws(',',collect_set(pgjg_res)) as ztpg,concat_ws('&',collect_list(pgcszb)) as pgcszb " +
                        "from t4 " +
                        "group by s_train_id,cx,bjwz,bjpjm"
                )
                // Judge the aggregated (deduplicated) result of the 25 models at each position of each
                // locomotive: any D makes the overall grade D, otherwise any C makes it C, else A.
                .withColumn("zhpj_ztzl",
                        expr(" CASE \n" +
                                "        WHEN instr(ztpg, 'D') > 0 THEN 'D'\n" +
                                "        WHEN instr(ztpg, 'C') > 0 THEN 'C'\n" +
                                "        ELSE 'A'\n" +
                                "    END"))
                .orderBy("s_train_id", "bjwz")
                .createOrReplaceTempView("pgcs");
        System.out.println("====== tcms 实时数据 pgcs  ====");
        spark.sql("select * from pgcs").show(60, false);


        // Aggregated results for locomotives whose TCMS never reported a fault: all models get the
        // "normal" evaluation parameters and an overall grade of A.
        spark.sql("" +
                "with t1 as ( \n" +
                "  select distinct syjx, bjwz, bjpjm, mx_id,\n" +
                "         concat('{\"模型ID\":', mx_id, ',\"模型简称\":\"', mxjg,'\",\"模型结果\":\"A\",\"模型转义\":\"正常\"}') as pgcs\n" +
                "  from t_phm_mxxq \n" +
                "  where syjx " + cxIn + " \n" +
                "  order by syjx, bjwz, mx_id\n" +
                ")\n" +
                "select syjx, bjwz, bjpjm,\n" +
                "       concat_ws('&', collect_list(pgcs)) as pgcszb\n" +
                "from t1 \n" +
                "group by syjx, bjwz, bjpjm").createOrReplaceTempView("mxxq_A");
        System.out.println("==== mxxq_A = = = = =  =");
        spark.sql("select * from mxxq_A").show(30, false);

        // Base columns: s_train_id,cx,sysfl,sysflcode1,sysflcode2,wz,pjflm,bjbm,pjflbm,pjdjm,pjxlh,ggxh_th,lj,lj_id,ljjc,ljpxm,psd,psd_id,jwdjc,sccj,zzrq,ljzxgl,gjxhzx,zjxhzx,jxxx
        // Columns still to be filled in afterwards: id, pgcszb, zhpj_ztzl, updatatime, jxrq
        spark.sql("" +
                "select distinct s_train_id, cx,cx_id, ssxt as sysfl, xtbm as sysflcode1, xtpjm as sysflcode2, wz, fljc as pjflm, flbm as bjbm,\n" +
                "  lbjbm as pjflbm, pjdjm, pjxlh, gzxh as ggxh_th, lj, lj_id, ljjc, ljpxm, psd, psd_id, yydjc as jwdjc, sccj,\n" +
                "  case when zzrq_ps <> '' and zzrq_ps is not null then zzrq_ps else zzrq_jcll end as zzrq,\n" +
                "  ljzxgl, gjxhzx, zjxhzx, jxxx \n" +
                "from ps_gxzz_jcll_zx_jxxx_all_dim").createOrReplaceTempView("parts_label_base");
        System.out.println("==== parts_label_base 表中的数据 ");
        spark.sql("select * from parts_label_base order by s_train_id").show(100, false);

        // Final join producing the parts_label data: `parts_label_base` join `mxxq_A` for all
        // locomotives, then left-join `pgcs` (locomotives with TCMS faults). When a row matches,
        // pgcszb / zhpj_ztzl are taken from `pgcs`; otherwise the "normal" values from `mxxq_A`.

        Dataset<Row> partsLabelDsFinal = spark.sql("" +
                        "with t1 as (\n" +
                        "  select s_train_id, cx,cx_id, sysfl, sysflcode1, sysflcode2, wz, '牵引变流器' as pjflm, bjbm, pjflbm, pjdjm, pjxlh, ggxh_th, lj, lj_id, ljjc,\n" +
                        "       ljpxm, psd, psd_id, jwdjc, sccj, zzrq, ljzxgl, gjxhzx, zjxhzx, jxxx, pgcszb,'A' as zhpj_ztzl\n" +
                        "  from parts_label_base p join mxxq_A m on p.cx = m.syjx and p.wz = m.bjwz and p.pjflbm = m.bjpjm\n" +
                        ")\n" +
                        "select t1.s_train_id,t1.cx,cx_id, sysfl, sysflcode1, sysflcode2, wz, pjflm, bjbm, pjflbm, pjdjm, pjxlh, ggxh_th, lj, lj_id, ljjc,\n" +
                        "       ljpxm, psd, psd_id, jwdjc, sccj, zzrq, ljzxgl, gjxhzx, zjxhzx, jxxx,\n" +
                        "       if(p2.s_train_id is not null ,p2.pgcszb,t1.pgcszb) as pgcszb,\n" +
                        "       if(p2.s_train_id is not null ,p2.zhpj_ztzl,t1.zhpj_ztzl) as zhpj_ztzl\n" +
                        "from t1 left join pgcs p2 on t1.s_train_id = p2.s_train_id and t1.wz = p2.bjwz and t1.pjflbm = p2.bjpjm")
                .withColumn("id", expr("uuid()"))
                // NOTE(review): column name "updatatime" (sic) is kept — downstream consumers
                // presumably depend on this exact spelling; confirm before renaming.
                .withColumn("updatatime", date_format(expr("now()"), "yyyy-MM-dd HH:mm:ss").cast("string"))
                .withColumn("ZZRQ", col("ZZRQ").cast("string"));
        System.out.println(" = = = = parts_label");
        partsLabelDsFinal.show(80, false);
        // MysqlUtils_3.upsertDatasetToMySQL(partsLabelDsFinal, "parts_label");
        // Release the cached dimension table.
        spark.sql("UNCACHE TABLE ps_gxzz_jcll_zx_jxxx_all_dim");

        // Write the final result into the HBase table via Phoenix.
        partsLabelDsFinal.write().mode("OverWrite").format("org.apache.phoenix.spark")
                // FIX: option key was misspelled "dirver", so the driver class option was silently
                // ignored by the connector.
                .option("driver", "org.apache.phoenix.jdbc.PhoenixDriver")
                .option("zkUrl", "jdbc:phoenix:JWGL-YY-T141149,JWGL-YY-T141150,JWGL-YY-T141151,JWGL-YY-T141152,JWGL-YY-V139150,JWGL-YY-V139151,JWGL-YY-V139152,JWGL-YY-V139153:2181:/hbase-unsecure")
                .option("table", "LOCOMOTIVE_ADS.PARTS_LABEL").save();
    }
}
