package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TEciLicens
 * @Author: kongcb
 * @Description: An enterprise may have multiple administrative-license records; updates
 *               must be applied in bulk — delete all of its records, then re-insert all.
 * @Date: 2021/12/15 9:21
 */
/**
 * Spark batch job that builds the incremental table {@code a_dc_ep_incr.t_eci_licens}
 * from the ODS license snapshot {@code a_dc_ep_ods.zs_t_ot_license}.
 *
 * <p>Because one enterprise can own several license records, changes are applied in
 * bulk: additions are written with {@code isadd = '1'} and deletions with
 * {@code isadd = '-1'}. When the source snapshot is empty, the incremental table is
 * truncated instead.
 */
public class TEciLicens {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_eci_licens").enableHiveSupport().getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");
            Dataset<Row> zs_data = spark.sql("select * from a_dc_ep_ods.zs_t_ot_license");
            long zs_count = zs_data.count();
            if (zs_count > 0) {
                registerQccLicenseView(spark);
                registerAdditionViews(spark);
                writeAdditions(spark);
                appendDeletions(spark);
            } else {
                // No source rows this cycle: clear the incremental table so downstream
                // consumers do not reprocess the previous batch.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_eci_licens");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Always release the Spark session, even if a query above fails;
            // otherwise the application leaks the session/cluster resources.
            spark.stop();
        }
    }

    /**
     * Registers temp view {@code qcc_license}: QCC administrative-license rows
     * ({@code a_dc_ep_dwi.t_eci_licens}) enriched with the unified social credit code
     * and registration number from the company master table
     * ({@code a_dc_ep_ods.t_eci_company}).
     *
     * <p>The ODS license table ({@code a_dc_ep_ods.zs_t_ot_license}) carries both the
     * credit code (CREDITNO) and the registration number (REGNO), so this view is the
     * join target for matching deletions by credit code.
     */
    private static void registerQccLicenseView(SparkSession spark) {
        spark.sql("SELECT a.*,\n" +
                "       b.credit_code,\n" +
                "       b.no as register_no\n" +
                "FROM a_dc_ep_dwi.t_eci_licens a\n" +
                "LEFT JOIN a_dc_ep_ods.t_eci_company b ON a.company_id = b.company_id\n" +
                "WHERE b.company_id IS NOT NULL").createOrReplaceTempView("qcc_license");
    }

    /**
     * Registers temp views {@code bbb} and {@code aaa} holding the rows to add
     * ({@code isadd = '1'}).
     *
     * <p>{@code bbb}: source rows matched to a company by unified social credit code
     * (case- and whitespace-insensitive). {@code aaa}: rows whose company has no
     * credit code, matched by exact company name instead. The two views are unioned
     * and de-duplicated by {@link #writeAdditions(SparkSession)}.
     */
    private static void registerAdditionViews(SparkSession spark) {
        // Match by credit code where the company record has one.
        spark.sql("select zs.record_id as id," +
                "qcc.key_no," +
                "qcc.company_id," +
                "qcc.company_name," +
                "md5(zs.LICNAME_CN) as no," +
                "zs.LICNAME_NO as licens_doc_no," +
                "zs.LICNAME_CN," +
                "zs.VALFROM," +
                "zs.VALTO," +
                "zs.LICANTH," +
                "zs.LICITEM," +
                "zs.jobid," +
                "'1' as isadd " +
                " from " +
                "a_dc_ep_ods.zs_t_ot_license zs " +
                "left join (select * from a_dc_ep_ods.t_eci_company where credit_code is not null and credit_code !='') qcc on upper(trim(zs.CREDITNO)) = upper(trim(qcc.credit_code))" +
                " where qcc.credit_code is not null").createOrReplaceTempView("bbb");
        // Fall back to matching by company name when the credit code is missing.
        spark.sql("select zs.record_id as id," +
                "qcc.key_no," +
                "qcc.company_id," +
                "qcc.company_name," +
                "md5(zs.LICNAME_CN) as no," +
                "zs.LICNAME_NO as licens_doc_no," +
                "zs.LICNAME_CN," +
                "zs.VALFROM," +
                "zs.VALTO," +
                "zs.LICANTH," +
                "zs.LICITEM," +
                "zs.jobid," +
                "'1' as isadd " +
                " from " +
                "a_dc_ep_ods.zs_t_ot_license zs " +
                "left join (select * from a_dc_ep_ods.t_eci_company where credit_code is  null or credit_code ='') qcc on zs.entname = qcc.company_name" +
                " where qcc.company_name is not null").createOrReplaceTempView("aaa");
    }

    /**
     * Overwrites the incremental table with the distinct union of the two
     * addition views ({@code aaa} and {@code bbb}).
     */
    private static void writeAdditions(SparkSession spark) {
        spark.sql("insert overwrite table a_dc_ep_incr.t_eci_licens select distinct c.* from (select a.* from aaa a union all select b.* from bbb b) c");
    }

    /**
     * Appends deletion markers ({@code isadd = '-1'}) for QCC license rows whose
     * credit code appears in the ODS deletion table
     * ({@code a_dc_ep_ods.zs_t_ot_license_del}).
     *
     * <p>Must run after {@link #writeAdditions(SparkSession)}: additions use
     * {@code insert overwrite} while deletions use {@code insert into}, so reversing
     * the order would discard the deletion rows.
     */
    private static void appendDeletions(SparkSession spark) {
        spark.sql("insert into table a_dc_ep_incr.t_eci_licens select " +
                "qcc.id," +
                "qcc.key_no," +
                "qcc.company_id," +
                "qcc.company_name," +
                "qcc.register_no," +
                "qcc.licens_doc_no," +
                "qcc.licens_doc_name," +
                "qcc.validity_from," +
                "qcc.validity_to," +
                "qcc.licens_office," +
                "qcc.licens_content," +
                "qcc.dates," +
                "'-1' as isadd from qcc_license qcc inner join (select a.* from a_dc_ep_ods.zs_t_ot_license_del a where a.creditcode is not null and a.creditcode !='') zs on upper(trim(qcc.credit_code)) = upper(trim(zs.creditcode))");
    }
}
