package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class TRandomInspectDetail {

    /**
     * Incremental-diff job for {@code t_random_inspect_detail}.
     *
     * <p>Compares the latest ODS snapshot ({@code a_dc_ep_ods.zs_t_random_inspect_detail})
     * with the id list in {@code a_dc_ep_ods.zs_t_random_inspect_detail_del} and rewrites
     * {@code a_dc_ep_incr.t_random_inspect_detail} with this cycle's changes, tagging each
     * row via the {@code isadd} column: '1' = new, '0' = changed, '-1' = deleted.
     *
     * <p>NOTE(review): the {@code _del} table appears to hold the previously known
     * RECORD_IDs (insert = snapshot id not in it, update = snapshot id in it) — confirm
     * against the upstream extractor.
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_random_inspect_detail")
                .enableHiveSupport()
                .getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");

        Dataset<Row> zsData = spark.sql("select * from a_dc_ep_ods.zs_t_random_inspect_detail");
        long zsCount = zsData.count();
        if (zsCount > 0) {
            // Current snapshot, registered so the delete-detection query can anti-join it.
            zsData.createOrReplaceTempView("zs_tab");

            // Deleted rows (isadd = '-1'): ids from the _del list that no longer appear in
            // the new snapshot; their column values are recovered from the DWI layer.
            // BUG FIX: the original anti-joined the _del list against the DWI table itself
            // (left join ... where b.RECORD_ID is null => ids NOT in DWI) and then selected
            // those very ids FROM DWI — a contradiction, so res_delete was always empty and
            // deletions never propagated. The anti-join must run against the new snapshot
            // (zs_tab), which is why zs_tab is registered above yet was never used.
            spark.sql("select RECORD_ID,\n" +
                    "DETAIL_ID,\n" +
                    "INSPECT_RES,\n" +
                    "INSPECT_ITEM,\n" +
                    "dates," +
                    "'-1' as isadd from a_dc_ep_dwi.t_random_inspect_detail where RECORD_ID in " +
                    "(select a.RECORD_ID from a_dc_ep_ods.zs_t_random_inspect_detail_del as a " +
                    "left join zs_tab as b on a.RECORD_ID=b.RECORD_ID where b.RECORD_ID is null)")
                    .createOrReplaceTempView("res_delete");

            // New rows (isadd = '1'): snapshot ids absent from the _del list.
            spark.sql("select a.*,'1' as isadd from a_dc_ep_ods.zs_t_random_inspect_detail as a " +
                    "left join a_dc_ep_ods.zs_t_random_inspect_detail_del as b " +
                    "on a.RECORD_ID = b.RECORD_ID where b.RECORD_ID is null")
                    .createOrReplaceTempView("res_insert");

            // Changed rows (isadd = '0'): snapshot ids also present in the _del list.
            spark.sql("select a.*,'0' as isadd from a_dc_ep_ods.zs_t_random_inspect_detail as a " +
                    "inner join a_dc_ep_ods.zs_t_random_inspect_detail_del as b " +
                    "on a.RECORD_ID = b.RECORD_ID")
                    .createOrReplaceTempView("res_update");

            // Rebuild the INCR table with the full diff for this cycle.
            spark.sql("insert overwrite table a_dc_ep_incr.t_random_inspect_detail " +
                    "(select * from res_insert union all select * from res_update union all select * from res_delete)");
        } else {
            // Empty snapshot this cycle: clear the INCR table so stale diffs are not re-consumed.
            spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_random_inspect_detail");
            System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
        }
        spark.stop();
    }
}
