package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TEciMpledgePledge
 * @Author: gaojian
 * @Description: Data-mapping job between the QCC (Qichacha) table t_eci_mpledge_pledge
 *               and the ZhongShu source tables t_mov_reg / t_mov_debt.
 * @Date: 2021/12/24 15:30
 */


public class TEciMpledgePledge {
    /**
     * Entry point of the Spark batch job.
     *
     * <p>Reads the ZhongShu movable-property tables (zs_t_mov_reg / zs_t_mov_debt /
     * zs_t_mov_info_del) from a_dc_ep_ods, joins them with the QCC company master
     * table t_eci_company, and writes UPDATE ("0") and DELETE ("-1") rows into the
     * incremental target table a_dc_ep_incr.t_eci_mpledge_pledge. When the current
     * ZhongShu batch is empty, the target table is truncated instead.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_eci_mpledge_pledge")
                .enableHiveSupport()
                .getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");
        // Ensure the session is always released, even when a SQL step throws.
        try {
            Dataset<Row> zs_data = spark.sql("select * from a_dc_ep_ods.zs_t_mov_debt");
            // isEmpty() only inspects the first non-empty partition, unlike count(),
            // which would scan the whole table just to decide emptiness.
            if (!zs_data.isEmpty()) {

                // Processing outline (full delete + full reload): 1. UPDATE  2. DELETE
                // 1. UPDATE
                // SQL logic: build the update set update_data
                //     1. From dc_ep_ods take the full ZhongShu set joined with the _del
                //        table, keeping rows whose credit_code (unified social credit
                //        code) is non-empty -> set A
                //     2. From dc_ep_ods take the full company master set with a
                //        non-empty unified social credit code -> set B
                //     3. INNER JOIN A and B on the unified social credit code; note the
                //        UPPER and TRIM normalization on both sides
                //        JOB_ID and "0" as isadd are the two extra columns
                //        ID is recomputed as md5(company name + movable-mortgage
                //        registration number); company info comes from the QCC side
                //     4. update_data.createOrReplaceTempView for the update
                //     5. Sync into the target table

                Dataset<Row> update_data = spark.sql("" +
                        "SELECT \n" +
                        "md5(CONCAT(UPPER(TRIM(B.COMPANY_NAME)),UPPER(TRIM(A.MAB_REGNO)))) AS ID,\n" +
                        "B.KEY_NO,\n" +
                        "B.COMPANY_ID,\n" +
                        "B.COMPANY_NAME,\n" +
                        "A.MAB_REGNO AS REGIST_NO,\n" +
                        "A.MAB_REG_ORG AS REGIST_OFFICE,\n" +
                        "A.MAB_REG_DATE AS REGIST_DATE,\n" +
                        "CONCAT(A.DEBT_SDATE,'至',A.DEBT_EDATE) AS ASSURANCE_SCOPE,\n" +
                        "A.MAB_DEBT_TYPE AS KIND,\n" +
                        "A.MAB_GUAR_AMT AS AMOUNT,\n" +
                        "A.MAB_DEBT_RANGE AS FULFILL_OBLIGATION,\n" +
                        "A.MAB_DEBT_RMK AS REMARK,\n" +
                        "A.JOBID, '0' AS ISADD \n" +
                        "FROM \n" +
                        "(\n" +
                        "    select b.creditcode,a.* from (\n" +
                        "        select r.*,d.DEBT_SDATE,d.DEBT_EDATE,d.MAB_DEBT_TYPE,d.MAB_DEBT_RANGE,d.MAB_DEBT_RMK \n" +
                        "        from a_dc_ep_ods.zs_t_mov_reg r\n" +
                        "        left join a_dc_ep_ods.zs_t_mov_debt d on UPPER(TRIM(d.mab_regno)) = UPPER(TRIM(r.mab_regno)) \n" +
                        "    ) a \n" +
                        "    inner join a_dc_ep_ods.zs_t_mov_info_del b on UPPER(TRIM(b.entid)) = UPPER(TRIM(a.entid))\n" +
                        "    where b.creditcode is not null and b.creditcode != ''\n" +
                        ") A INNER JOIN \n" +
                        "a_dc_ep_ods.t_eci_company B \n" +
                        "ON UPPER(TRIM(B.CREDIT_CODE)) = UPPER(TRIM(A.CREDITCODE)) ");
                update_data.createOrReplaceTempView("tmp_t_eci_mpledge_pledge");
                // Full reload: overwrite the increment table with this batch's updates.
                spark.sql("insert overwrite table a_dc_ep_incr.t_eci_mpledge_pledge select * from tmp_t_eci_mpledge_pledge");

                // 2. DELETE
                // SQL logic: build the delete_data set
                //     1. From the dc_ep_ods _del table take the full set of rows to
                //        delete, keyed by credit_code
                //     2. Join the company master set on credit_code to resolve company_id
                //     3. Join the QCC business table on company_id
                //     4. delete_data.createOrReplaceTempView to stage the insert
                //     9. Sync into the a_dc_ep_incr target table
                spark.sql("select d.*,c.credit_code from a_dc_ep_dwi.t_eci_mpledge_pledge d inner join a_dc_ep_ods.t_eci_company c on d.company_id = c.company_id")
                        .createOrReplaceTempView("tmp_table7");
                Dataset<Row> delete_data = spark.sql("" +
                        "select \n" +
                        "m.id,\n" +
                        "m.key_no,\n" +
                        "m.company_id,\n" +
                        "m.company_name,\n" +
                        "m.regist_no,\n" +
                        "m.regist_office,\n" +
                        "m.regist_date,\n" +
                        "m.assurance_scope,\n" +
                        "m.kind,\n" +
                        "m.amount,\n" +
                        "m.fulfill_obligation,\n" +
                        "m.remark,\n" +
                        "del.jobid,'-1' as isadd\n" +
                        "FROM\n" +
                        "(select del.* from a_dc_ep_ods.zs_t_mov_info_del del\n" +
                        "left join a_dc_ep_ods.zs_t_mov_info i on upper(trim(i.entid)) = upper(trim(del.entid))\n" +
                        "where i.entid is null and del.creditcode != '' and del.creditcode is not null) del\n" +
                        "inner join\n" +
                        "tmp_table7 m on upper(trim(m.credit_code)) = upper(trim(del.creditcode))");
                delete_data.createOrReplaceTempView("tmp_t_eci_mpledge_pledge2");
                // Append (not overwrite) so the UPDATE rows written above are kept.
                spark.sql("insert into table a_dc_ep_incr.t_eci_mpledge_pledge select * from tmp_t_eci_mpledge_pledge2");
            } else {
                // Empty ZhongShu batch: clear the increment table for this cycle.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_eci_mpledge_pledge");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            spark.stop();
        }
    }
}
