package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TEciSpotCheck
 * @Author: kongcb
 * @Description: Spark job that syncs spot-check records from the ZS source tables
 *               into the incremental table a_dc_ep_incr.t_eci_spotcheck, matching
 *               companies by unified social credit code.
 * @Date: 2021/12/15 9:22
 */
public class TEciSpotCheck {

    /**
     * Entry point. Reads the current batch of ZS spot-check records and, when the
     * batch is non-empty, rewrites the incremental table
     * {@code a_dc_ep_incr.t_eci_spotcheck} with updated rows (isadd = '1') and
     * appends delete markers (isadd = '-1'). When the batch is empty, the
     * incremental table is truncated instead.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_eci_spotcheck").enableHiveSupport().getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");
            Dataset<Row> zsData = spark.sql("select * from a_dc_ep_ods.zs_t_spot_check");
            if (zsData.count() > 0) {
                registerTempViews(spark);
                overwriteUpdatedRecords(spark);
                appendDeleteMarkers(spark);
            } else {
                // No source data this cycle: clear the incremental table so stale
                // rows from the previous run are not carried forward.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_eci_spotcheck");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Release the Spark application even if a query above fails;
            // otherwise the session (and its cluster resources) would leak.
            spark.stop();
        }
    }

    /**
     * Registers the two temp views used by the update/delete steps:
     * {@code zs_spotcheck} — ZS spot-check rows enriched with the unified social
     * credit code taken from the delete table; and {@code qcc_spotcheck} — QCC
     * spot-check rows enriched with the company credit code.
     */
    private static void registerTempViews(SparkSession spark) {
        // ZS side: pull the unified social credit code from the del table.
        spark.sql("select zs.*,del.creditcode from a_dc_ep_ods.zs_t_spot_check zs " +
                "left join a_dc_ep_ods.zs_t_spot_check_del del on zs.entid=del.entid where del.entid is not null and del.creditcode is not null and del.creditcode != ''").createOrReplaceTempView("zs_spotcheck");
        // QCC side: pull the unified social credit code from the company table.
        spark.sql("select a.*,b.credit_code from a_dc_ep_dwi.t_eci_spotcheck a " +
                "left join a_dc_ep_ods.t_eci_company b on a.company_id = b.company_id where b.company_id is not null").createOrReplaceTempView("qcc_spotcheck");
    }

    /**
     * Overwrites the incremental table with the current ZS spot-check rows,
     * joined to QCC companies by credit code (case- and whitespace-insensitive).
     * Rows are flagged isadd = '1'.
     */
    private static void overwriteUpdatedRecords(SparkSession spark) {
        spark.sql("insert overwrite table a_dc_ep_incr.t_eci_spotcheck select " +
                "zs.record_id, " +
                "qcc.key_no," +
                "qcc.company_id," +
                "qcc.company_name," +
                "md5(zs.ISP_RESULT)," +
                "zs.ISP_REGORG," +
                "zs.ISP_TYPE," +
                "zs.ISP_DATE," +
                "zs.ISP_RESULT," +
                "'' as remark," +
                "zs.jobid," +
                "'1' as isadd " +
                " from zs_spotcheck zs " +
                "left join (select a.* from a_dc_ep_ods.t_eci_company a where a.credit_code is not null and a.credit_code != '') qcc on upper(trim(zs.creditcode)) = upper(trim(qcc.credit_code))");
    }

    /**
     * Appends delete markers (isadd = '-1') for QCC spot-check rows whose credit
     * code matches an entry in the ZS delete table.
     */
    private static void appendDeleteMarkers(SparkSession spark) {
        spark.sql("insert into table a_dc_ep_incr.t_eci_spotcheck select " +
                "qcc.id," +
                "qcc.key_no," +
                "qcc.company_id," +
                "qcc.company_name," +
                "qcc.no," +
                "qcc.executive_org," +
                "qcc.type," +
                "qcc.date," +
                "qcc.consequence," +
                "qcc.remark," +
                "qcc.dates," +
                "'-1' as isadd " +
                " from (select a.* from a_dc_ep_ods.zs_t_spot_check_del a where a.creditcode is not null and a.creditcode !='') del inner join qcc_spotcheck qcc on upper(trim(del.creditcode)) = upper(trim(qcc.credit_code))");
    }
}
