package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TEciLiquidation
 * @Author: gaojian
 * @Description: Data-mapping job between the QCC (qichacha) table t_eci_liquidation
 *               and the Zhongshu (ZS) table t_brk_lqu.
 * @Date: 2021/12/20
 */

public class TEciLiquidation {

    /**
     * Entry point for the incremental sync of {@code a_dc_ep_incr.t_eci_liquidation}.
     *
     * <p>If the current ZS batch ({@code a_dc_ep_ods.zs_t_brk_lqu}) has rows, the job writes
     * UPDATE rows (isadd = '0') followed by DELETE markers (isadd = '-1') into the incremental
     * table. If the batch is empty, the incremental table is truncated instead.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_eci_liquidation").enableHiveSupport().getOrCreate();
        // Ensure the Spark application is always released, even if a query fails.
        try {
            spark.sparkContext().setLogLevel("ERROR");
            Dataset<Row> zsData = spark.sql("select * from a_dc_ep_ods.zs_t_brk_lqu");
            if (zsData.count() > 0) {
                // Processing order matters: 1.UPDATE (overwrite) then 2.DELETE (append).
                applyUpdates(spark);
                applyDeletes(spark);
            } else {
                // Empty batch: clear the incremental table so downstream sees no stale rows.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_eci_liquidation");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            spark.stop();
        }
    }

    /**
     * Phase 1 (UPDATE): joins ZS liquidation rows that also appear in the del table
     * (matched on entid) with QCC company data on credit code, and OVERWRITES the
     * incremental table with the result, flagged isadd = '0'.
     *
     * <p>NOTE(review): the row id is md5(credit code + company name), both upper-cased
     * and trimmed — presumably the downstream merge key; confirm with consumers.
     */
    private static void applyUpdates(SparkSession spark) {
        Dataset<Row> updateData = spark.sql("SELECT \n" +
                "md5(CONCAT(UPPER(TRIM(A.CREDITCODE)),UPPER(TRIM(B.COMPANY_NAME)))) AS ID,\n" +
                "B.KEY_NO,\n" +
                "B.COMPANY_ID,\n" +
                "B.COMPANY_NAME,\n" +
                "A.LIGPRINCIPAL AS LEADER,\n" +
                "A.LIQMEN AS MEMBER,\n" +
                "A.JOBID, '0' AS ISADD \n" +
                "FROM \n" +
                "(\n" +
                "    select del.creditcode,l.* from a_dc_ep_ods.zs_t_brk_lqu l\n" +
                "    inner join a_dc_ep_ods.zs_t_brk_lqu_del del on upper(trim(del.entid)) = upper(trim(l.entid))\n" +
                "    where del.creditcode is not null and del.creditcode != '' \n" +
                ") A INNER JOIN \n" +
                "a_dc_ep_ods.t_eci_company B \n" +
                "ON UPPER(TRIM(B.CREDIT_CODE)) = UPPER(TRIM(A.CREDITCODE)) ");
        updateData.createOrReplaceTempView("tmp_t_eci_liquidation");
        spark.sql("insert overwrite table a_dc_ep_incr.t_eci_liquidation select * from tmp_t_eci_liquidation");
    }

    /**
     * Phase 2 (DELETE): finds del-table rows with no matching ZS row (anti-join on entid),
     * resolves them to existing DWI liquidation rows via credit code, and APPENDS them to
     * the incremental table flagged isadd = '-1' so downstream can remove them.
     */
    private static void applyDeletes(SparkSession spark) {
        // Enrich existing DWI rows with the company credit code needed to match del records.
        spark.sql("select d.*,c.credit_code from a_dc_ep_dwi.t_eci_liquidation d inner join a_dc_ep_ods.t_eci_company c on d.company_id = c.company_id").createOrReplaceTempView("tmp_eci_liquidation");

        Dataset<Row> deleteData = spark.sql("select \n" +
                "l.id,\n" +
                "l.key_no,\n" +
                "l.company_id,\n" +
                "l.company_name,\n" +
                "l.leader,\n" +
                "l.member,\n" +
                "del.jobid,'-1' as isadd\n" +
                "from (select del.* from a_dc_ep_ods.zs_t_brk_lqu_del del \n" +
                "left join a_dc_ep_ods.zs_t_brk_lqu i on upper(trim(i.entid)) = upper(trim(del.entid))\n" +
                "where i.entid is null and del.creditcode != '' and del.creditcode is not null) del\n" +
                "inner join tmp_eci_liquidation l on upper(trim(l.credit_code)) = upper(trim(del.creditcode))");
        deleteData.createOrReplaceTempView("tmp_t_eci_liquidation2");
        // Append (not overwrite): must preserve the UPDATE rows written in phase 1.
        spark.sql("insert into table a_dc_ep_incr.t_eci_liquidation select * from tmp_t_eci_liquidation2");
    }
}
