package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class:
 * @Author: gaojian
 * @Description: 针对企查查t_env_punishment_detail表和中数t_env_punishment表进行数据映射开发
 * @Date: 2021/12/20
 */

public class TEnvPunishmentDetail {

    /**
     * Surrogate key expression shared by the ID and ENVPUNISH_ID output columns:
     * md5 over the normalized credit code, company name, document number and
     * punish date (empty/null punish dates collapse to '1900-01-01').
     * Kept as a single constant so the two column expressions can never drift apart.
     */
    private static final String RECORD_KEY_MD5 =
            "md5(CONCAT(UPPER(TRIM(A.CREDITCODE)),UPPER(TRIM(B.COMPANY_NAME)),UPPER(TRIM(A.DOCUMENT_NUM)),\n" +
            "DATE_FORMAT(IF(A.PUNISH_DATE = '' OR A.PUNISH_DATE IS NULL,'1900-01-01',A.PUNISH_DATE),'yyyy-MM-dd')))";

    /**
     * Projection shared by both branches of the DELETE union below.
     * '-1' in ISADD marks the row as a deletion for downstream consumers.
     */
    private static final String DELETE_SELECT_LIST = "" +
            "select \n" +
            "p.id,\n" +
            "p.key_no,\n" +
            "p.company_id,\n" +
            "p.company_name,\n" +
            "p.envpunish_id,\n" +
            "p.case_no,\n" +
            "p.illegal_type,\n" +
            "p.punish_basis,\n" +
            "p.punishment_result,\n" +
            "p.punish_date,\n" +
            "p.punish_gov,\n" +
            "p.implementation,\n" +
            "p.punish_reason,\n" +
            "del.jobid,'-1' as isadd\n";

    /**
     * Entry point. If the zhongshu source table has rows for this period,
     * runs the UPDATE step then the DELETE step; otherwise truncates the
     * incremental target table.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_env_punishment_detail").enableHiveSupport().getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");
            // Current-period source rows from the zhongshu ODS table.
            Dataset<Row> zsData = spark.sql("select * from a_dc_ep_ods.zs_t_env_punishment");
            if (zsData.count() > 0) {
                // Processing steps: 1. UPDATE  2. DELETE
                writeUpdates(spark);
                writeDeletes(spark);
            } else {
                // No source data this period: clear the incremental target table.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_env_punishment_detail");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Always release the session, even when one of the queries above fails.
            spark.stop();
        }
    }

    /**
     * Step 1 (UPDATE): joins source rows that carry a credit code against
     * t_eci_company on the normalized credit code, and overwrites the
     * incremental target with isadd='0' rows.
     *
     * NOTE(review): the original code flagged that punish_basis did not map
     * successfully — confirm the mapping against the target schema.
     */
    private static void writeUpdates(SparkSession spark) {
        Dataset<Row> updateData = spark.sql("" +
                "SELECT \n" +
                RECORD_KEY_MD5 + " AS ID,\n" +
                "B.KEY_NO,\n" +
                "B.COMPANY_ID,\n" +
                "B.COMPANY_NAME,\n" +
                RECORD_KEY_MD5 + " AS ENVPUNISH_ID,\n" +
                "A.DOCUMENT_NUM AS CASE_NO,\n" +
                "A.ILLEGAL_TYPE AS ILLEGAL_TYPE,\n" +
                "A.PUNISH_BASIS AS PUNISH_BASIS,\n" +
                "A.PUNISH_MEASURES AS PUNISHMENT_RESULT,\n" +
                "A.PUNISH_DATE,\n" +
                "A.PUNISH_ORG AS PUNISH_GOV,\n" +
                "A.RECTIFY_STATUS AS IMPLEMENTATION,\n" +
                "A.ILLEGAL_TYPE AS PUNISH_REASON,\n" +
                "A.JOBID, '0' AS ISADD \n" +
                "FROM \n" +
                "(\n" +
                "    select p.* from a_dc_ep_ods.zs_t_env_punishment p where p.creditcode is not null and p.creditcode!='') A INNER JOIN \n" +
                "a_dc_ep_ods.t_eci_company B \n" +
                "ON UPPER(TRIM(B.CREDIT_CODE)) = UPPER(TRIM(A.CREDITCODE)) ");
        updateData.createOrReplaceTempView("tmp_t_env_punishment_detail");
        spark.sql("insert overwrite table a_dc_ep_incr.t_env_punishment_detail select * from tmp_t_env_punishment_detail");
    }

    /**
     * Step 2 (DELETE): appends isadd='-1' rows for records removed on the
     * zhongshu side.
     *
     * SQL logic:
     *   1. tmp_table7 = existing detail rows (dwi) enriched with the company
     *      credit code via t_eci_company.
     *   2. Branch 1 matches deletions that still carry a credit code, joined
     *      on the UPPER/TRIM-normalized code.
     *   3. Branch 2 resolves code-less deletions through com_company
     *      (entid -> company name) and joins on the company name.
     *   4. The union is appended (insert into) to the incremental target.
     */
    private static void writeDeletes(SparkSession spark) {
        // Existing detail rows enriched with the company credit code; used by both branches.
        spark.sql("select d.*,c.credit_code from a_dc_ep_dwi.t_env_punishment_detail d inner join a_dc_ep_ods.t_eci_company c on d.company_id = c.company_id")
                .createOrReplaceTempView("tmp_table7");
        // Branch 1: deletions that carry a non-empty credit code.
        Dataset<Row> deletedWithCode = spark.sql(DELETE_SELECT_LIST +
                "from (select del.* from a_dc_ep_ods.zs_t_env_punishment_del del where del.creditcode is not null and del.creditcode != '') del\n" +
                "inner join tmp_table7 p on upper(trim(p.credit_code)) = upper(trim(del.creditcode))");
        // Branch 2: deletions without a credit code, matched by company name via com_company.
        Dataset<Row> deletedWithoutCode = spark.sql(DELETE_SELECT_LIST +
                "from (select del.* from a_dc_ep_ods.zs_t_env_punishment_del del where del.creditcode is null or del.creditcode = '') del\n" +
                "left join a_dc_ep_ods.com_company d on del.entid = d.entid " +
                "inner join tmp_table7 p on d.name = p.company_name");
        Dataset<Row> deleteData = deletedWithCode.union(deletedWithoutCode);
        deleteData.createOrReplaceTempView("tmp_t_env_punishment_detail2");
        spark.sql("insert into table a_dc_ep_incr.t_env_punishment_detail select * from tmp_t_env_punishment_detail2");
    }
}
