package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TEciSimplecancellationDissent
 * @Author: gaojian
 * @Description: Data-mapping job between the QCC table t_eci_simplecancellation_dissent
 *               and the Zhongshu table t_quick_cancel.
 * @Date: 2021/12/21
 */

public class TEciSimplecancellationDissent {

    /**
     * Job entry point. Reads the Zhongshu quick-cancel feed and writes
     * incremental rows into {@code a_dc_ep_incr.t_eci_simplecancellation_dissent}:
     * an UPDATE pass (isadd = '0') followed by a DELETE pass (isadd = '-1').
     * When the source feed is empty, the incremental table is truncated instead.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_eci_simplecancellation_dissent")
                .enableHiveSupport()
                .getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");
        try {
            // Only the row count matters here — it decides whether this cycle has data to process.
            long zsCount = spark.sql("select * from a_dc_ep_ods.zs_t_quick_cancel").count();
            if (zsCount > 0) {
                // Processing steps: 1. UPDATE  2. DELETE
                runUpdateStep(spark);
                runDeleteStep(spark);
            } else {
                // Empty feed this cycle: clear the incremental table so stale rows are not re-consumed.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_eci_simplecancellation_dissent");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Always release the Spark session, even when a SQL stage fails.
            spark.stop();
        }
    }

    /**
     * Step 1 (UPDATE): joins the quick-cancel feed to t_eci_company on the
     * normalized credit code, derives a deterministic MD5 id from company name,
     * credit code and dissent date (null/empty dates fall back to 1900-01-01),
     * and overwrites the incremental table with isadd = '0' rows.
     *
     * @param spark the active Hive-enabled session
     */
    private static void runUpdateStep(SparkSession spark) {
        Dataset<Row> updateData = spark.sql("SELECT \n" +
                "md5(CONCAT(UPPER(TRIM(B.COMPANY_NAME)),UPPER(TRIM(A.CREDITCODE)),DATE_FORMAT(if(A.DISSENT_DATE is null or A.DISSENT_DATE = '','1900-01-01',A.DISSENT_DATE),'yyyy-MM-dd'))) AS ID,\n" +
                "md5(CONCAT(UPPER(TRIM(B.COMPANY_NAME)),UPPER(TRIM(A.CREDITCODE)),DATE_FORMAT(if(A.DISSENT_DATE is null or A.DISSENT_DATE = '','1900-01-01',A.DISSENT_DATE),'yyyy-MM-dd'))) AS ASSISTANCE_ID,\n" +
                "B.KEY_NO,\n" +
                "B.COMPANY_ID,\n" +
                "B.COMPANY_NAME,\n" +
                "A.DISSENT_ORG AS DISSENT_PERSON,\n" +
                "A.DISSENT_DATE AS DISSENT_DATE,\n" +
                "A.DISSENT_DES AS DISSENT_CONTENT,\n" +
                "A.JOBID, '0' AS ISADD \n" +
                "FROM \n" +
                "(select m.* from a_dc_ep_ods.zs_t_quick_cancel m where m.creditcode is not null and m.creditcode !='') A INNER JOIN \n" +
                "(select k.key_no,k.company_id,k.company_name,k.credit_code from a_dc_ep_ods.t_eci_company k where k.credit_code is not null and k.credit_code !='') B \n" +
                "ON UPPER(TRIM(B.CREDIT_CODE)) = UPPER(TRIM(A.CREDITCODE)) ");
        updateData.createOrReplaceTempView("tmp_t_eci_simplecancellation_dissent");
        // Overwrite: the UPDATE pass defines the baseline content of the incremental table.
        spark.sql("insert overwrite table a_dc_ep_incr.t_eci_simplecancellation_dissent select * from tmp_t_eci_simplecancellation_dissent");
    }

    /**
     * Step 2 (DELETE): finds feed entities present in the delete table but
     * absent from the current feed (anti-join on entid), matches them to
     * existing dwi rows via the company credit code, and appends isadd = '-1'
     * tombstone rows to the incremental table.
     *
     * @param spark the active Hive-enabled session
     */
    private static void runDeleteStep(SparkSession spark) {
        // Entities deleted this cycle: in the _del table but no longer in the live feed.
        spark.sql("select del.* from a_dc_ep_ods.zs_t_quick_cancel_del del \n" +
                "left join a_dc_ep_ods.zs_t_quick_cancel i on i.entid = del.entid\n" +
                "where i.entid is null and del.creditcode != '' and del.creditcode is not null").createOrReplaceTempView("ddd");
        // Existing dwi rows enriched with the company's credit code for matching.
        spark.sql("select d.*,c.credit_code from a_dc_ep_dwi.t_eci_simplecancellation_dissent d " +
                "inner join a_dc_ep_ods.t_eci_company c on d.company_id = c.company_id where c.credit_code !='' and c.credit_code is not null").createOrReplaceTempView("tmp_eci_simplecancellation_dissent");
        Dataset<Row> deleteData = spark.sql("" +
                "select \n" +
                "d.id,\n" +
                "d.assistance_id,\n" +
                "d.key_no,\n" +
                "d.company_id,\n" +
                "d.company_name,\n" +
                "d.dissent_person,\n" +
                "d.dissent_date,\n" +
                "d.dissent_content,\n" +
                "del.jobid,'-1' as isadd\n" +
                "from ddd del\n" +
                "inner join tmp_eci_simplecancellation_dissent d on upper(trim(d.credit_code)) = upper(trim(del.creditcode))");
        deleteData.createOrReplaceTempView("tmp_t_eci_simplecancellation_dissent2");
        // Append-only: delete markers must not wipe the UPDATE rows written in step 1.
        spark.sql("insert into table a_dc_ep_incr.t_eci_simplecancellation_dissent select * from tmp_t_eci_simplecancellation_dissent2");
    }
}
