package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * Spark batch job that maps the ZhongShu ("中数") enterprise industry feed onto the
 * incremental table {@code a_dc_ep_incr.t_eci_industry}.
 *
 * <p>Pipeline (all steps via Spark SQL temp views over Hive tables):
 * <ol>
 *   <li>Normalize the source table into {@code zs_industry_tmp}, splitting
 *       {@code industryco} by code length (2/3/4) into sub/middle/small category columns.</li>
 *   <li>Build {@code tmp_sum_company}: existing industry rows joined to the company
 *       table to pick up credit code and registration number as match keys.</li>
 *   <li>Match source rows to companies in three passes of decreasing reliability:
 *       credit code → registration number → company name, each pass excluding
 *       entities already matched by an earlier pass ({@code t_company1..3}, isadd=0).</li>
 *   <li>Overwrite the target with the matched (updated) rows, then append unmatched
 *       source rows as new companies (isadd=1) and deleted-feed rows (isadd=-1).</li>
 * </ol>
 *
 * @Author: kongcb
 * @Date: 2021/12/15 9:18
 */
public class TEciIndustry {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_eci_industry").enableHiveSupport().getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");
        // Guard: only run the mapping when the ZhongShu feed actually delivered rows.
        Dataset<Row> zs_data = spark.sql("select * from a_dc_ep_ods.zs_t_ent_basic");
        long zs_count = zs_data.count();
        if(zs_count>0){

            // Step 1: normalize the ZhongShu feed. industryco codes of length 2/3/4 are
            // routed into sub-industry / middle-category / small-category column pairs
            // (the other two pairs are left as empty strings for each row).
            spark.sql("SELECT a.entname," +
                    " a.entid,\n" +
                    "       a.creditcode,\n" +
                    "       a.regno,\n" +
                    "       a.orgcodes,\n" +
                    "       a.industryphy,\n" +
                    "       a.industryphyname,\n" +
                    "       if(length(trim(a.industryco))=2,a.industryco,'') as sub_industry_code,\n" +
                    "       if(length(trim(a.industryco))=2,a.industryconame,'') as sub_industry,\n" +
                    "       if(length(trim(a.industryco))=3,a.industryco,'') as middle_category_code,\n" +
                    "       if(length(trim(a.industryco))=3,a.industryconame,'') as middle_category,\n" +
                    "       if(length(trim(a.industryco))=4,a.industryco,'') as small_category_code,\n" +
                    "       if(length(trim(a.industryco))=4,a.industryconame,'') as small_category,\n" +
                    "       a.jobid " +
                    "FROM a_dc_ep_ods.zs_t_ent_basic a").createOrReplaceTempView("zs_industry_tmp");
            // Step 2: join the existing industry rows to the company master table so the
            // unified social credit code (credit_code) and registration number (no) are
            // available as join keys against the ZhongShu feed. Keep only rows that have
            // at least one non-empty key.
            spark.sql("SELECT aa.*\n" +
                    "FROM\n" +
                    "  (SELECT a.*,\n" +
                    "          b.credit_code,\n" +
                    "          b.no\n" +
                    "   FROM a_dc_ep_dwi.t_eci_industry a\n" +
                    "   LEFT JOIN a_dc_ep_ods.t_eci_company b ON a.company_id = b.company_id\n" +
                    "   WHERE b.company_id IS NOT NULL ) aa\n" +
                    "WHERE (aa.credit_code IS NOT NULL\n" +
                    "       AND aa.credit_code != '')\n" +
                    "  OR (aa.no IS NOT NULL\n" +
                    "      AND aa.no != '')").createOrReplaceTempView("tmp_sum_company");

            // Step 3a: updated rows (isadd=0), matched by unified social credit code
            // (case-insensitive, trimmed).
            spark.sql("SELECT a.key_no,\n" +
                    " b.entid," +
                    "       a.company_id,\n" +
                    "       b.entname,\n" +
                    " b.industryphy," +
                    "b.industryphyname," +
                    "b.sub_industry_code," +
                    "b.sub_industry," +
                    "b.middle_category_code," +
                    "b.middle_category," +
                    "b.small_category_code," +
                    "b.small_category," +
                    "       b.JOBID,\n" +
                    "       \"0\" as isadd " +
                    "FROM\n" +
                    "  (SELECT *\n" +
                    "   FROM zs_industry_tmp\n" +
                    "   WHERE creditcode IS NOT NULL\n" +
                    "   AND creditcode !='') b\n" +
                    "INNER JOIN\n" +
                    "   tmp_sum_company a ON upper(trim(a.credit_code)) = upper(trim(b.creditcode))").createOrReplaceTempView("t_company1");

            // Step 3b: of the entities NOT matched in step 3a (anti-join on entid),
            // match by registration number with '-' stripped from both sides.
            spark.sql("SELECT a.key_no,\n" +
                    " b.entid," +
                    "       a.company_id,\n" +
                    "       b.entname,\n" +
                    " b.industryphy," +
                    "b.industryphyname," +
                    "b.sub_industry_code," +
                    "b.sub_industry," +
                    "b.middle_category_code," +
                    "b.middle_category," +
                    "b.small_category_code," +
                    "b.small_category," +
                    "       b.JOBID,\n" +
                    "       \"0\" as isadd " +
                    "FROM\n" +
                    "  (SELECT u.*\n" +
                    "   FROM zs_industry_tmp u\n" +
                    " left join t_company1 uu on u.entid = uu.entid " +
                    "   WHERE uu.entid is null and u.regno is not null and u.regno !='') b\n" +
                    "INNER JOIN\n" +
                    "  tmp_sum_company a ON regexp_replace(trim(a.no),'-','') = regexp_replace(trim(b.regno),'-','')").createOrReplaceTempView("t_company2");

            // Step 3c: of the entities matched in neither 3a nor 3b, match by exact
            // (trimmed) company name.
            // FIX: the t_company2 anti-join previously used
            //   "on uu.entid = uuu.entid and u.entid = uuu.entid"
            // — under "WHERE uu.entid is null" that ON condition can never be true
            // (null comparison), so uuu.entid was always null and entities already
            // matched by regno in t_company2 were matched again here by name,
            // producing duplicate mappings. Join on u.entid alone, mirroring step 3b.
            spark.sql("SELECT a.key_no,\n" +
                    " b.entid," +
                    "       a.company_id,\n" +
                    "       b.entname,\n" +
                    " b.industryphy," +
                    "b.industryphyname," +
                    "b.sub_industry_code," +
                    "b.sub_industry," +
                    "b.middle_category_code," +
                    "b.middle_category," +
                    "b.small_category_code," +
                    "b.small_category," +
                    "       b.JOBID,\n" +
                    "       \"0\" as isadd " +
                    "FROM\n" +
                    "  (SELECT u.*\n" +
                    "   FROM zs_industry_tmp u " +
                    "left join t_company1 uu on u.entid = uu.entid " +
                    "left join t_company2 uuu on u.entid = uuu.entid " +
                    "   where uu.entid is null and uuu.entid is null ) b\n" +
                    " INNER JOIN\n" +
                    "  tmp_sum_company a ON trim(a.company_name) = trim(b.ENTNAME)").createOrReplaceTempView("t_company3");

            // Union the three match passes and overwrite the incremental target.
            // (entid is carried in the temp view for the anti-join in step 4 but is
            // deliberately NOT written to the target table.)
            spark.sql("select distinct a.* from (select * from t_company1 union all select * from t_company2 union all select * from t_company3) a").createOrReplaceTempView("tmp_t_eci_company");
            spark.sql("insert overwrite table a_dc_ep_incr.t_eci_industry select " +
                    "key_no," +
                    "company_id," +
                    "entname," +
                    "industryphy," +
                    "industryphyname," +
                    "sub_industry_code," +
                    "sub_industry," +
                    "middle_category_code," +
                    "middle_category," +
                    "small_category_code," +
                    "small_category," +
                    "jobid," +
                    "isadd " +
                    "from tmp_t_eci_company");
            // Step 4: feed rows that matched no existing company are new companies
            // (isadd=1); key_no/company_id are both derived from md5 of the credit code
            // (when it is a valid 18-char code) or the entity name.
            // NOTE(review): if creditcode is not 18 chars AND entname is null, md5(null)
            // yields a NULL key — confirm upstream guarantees entname is populated.
            Dataset<Row> insert_data = spark.sql("SELECT md5(if(length(b.creditcode)=18,b.creditcode,b.entname)) as key_no,\n" +
                    "       md5(if(length(b.creditcode)=18,b.creditcode,b.entname)) as company_id,\n" +
                    " b.entid," +
                    " b.entname,\n" +
                    " b.industryphy," +
                    "b.industryphyname," +
                    "b.sub_industry_code," +
                    "b.sub_industry," +
                    "b.middle_category_code," +
                    "b.middle_category," +
                    "b.small_category_code," +
                    "b.small_category," +
                    "       b.JOBID,\n" +
                    "       \"1\" as isadd " +
                    "FROM\n" +
                    "  zs_industry_tmp b\n" +
                    "LEFT JOIN\n" +
                    "  tmp_t_eci_company a ON b.entid = a.entid\n" +
                    "WHERE a.entid IS NULL");
            insert_data.createOrReplaceTempView("tmp_t_eci_company1");
            spark.sql("insert into table a_dc_ep_incr.t_eci_industry select " +
                    "key_no," +
                    "company_id," +
                    "entname," +
                    "industryphy," +
                    "industryphyname," +
                    "sub_industry_code," +
                    "sub_industry," +
                    "middle_category_code," +
                    "middle_category," +
                    "small_category_code," +
                    "small_category," +
                    "jobid," +
                    "isadd " +
                    " from tmp_t_eci_company1");
            // Step 5: rows present in the ZhongShu deletion feed are appended with
            // isadd=-1, matched to existing companies by credit code. The insert below
            // is positional, so these columns line up with the target schema by order,
            // not by name.
            // NOTE(review): the column references here are unqualified — verify they
            // are unambiguous between tmp_sum_company and zs_t_ent_basic_del.
            Dataset<Row> delete_data = spark.sql("select " +
                    "key_no," +
                    "company_id," +
                    "company_name," +
                    "industry_code," +
                    "industry," +
                    "sub_industry_code," +
                    "sub_industry," +
                    "middle_category_code," +
                    "middle_category," +
                    "small_category_code," +
                    "small_category," +
                    "dates," +
                    "'-1' as isadd from tmp_sum_company a inner join (select del.* from a_dc_ep_ods.zs_t_ent_basic_del del where del.creditcode is not null and del.creditcode !='') b on upper(trim(a.credit_code)) = upper(trim(b.creditcode))");
            delete_data.createOrReplaceTempView("tmp_t_eci_company2");
            spark.sql("insert into table a_dc_ep_incr.t_eci_industry select * from tmp_t_eci_company2");
        }else {
            // Empty feed: clear the incremental table so downstream consumers see no
            // stale rows from the previous run.
            spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_eci_industry");
            System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
        }
        spark.stop();
    }
}
