package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Author: kongcb
 * @Description: Maps data between the Qichacha t_company_location table and the
 *               Zhongshu (中数) t_ent_basic table, writing incremental update /
 *               insert / delete rows into a_dc_ep_incr.t_company_location.
 * @Date: 2021/12/15 9:20
 */
public class TCompanyLocation {
    /**
     * Entry point. Builds the incremental mapping for a_dc_ep_incr.t_company_location:
     * <ol>
     *   <li>If the Zhongshu incremental source (zs_t_ent_basic) is empty, truncate the
     *       target table and exit.</li>
     *   <li>Otherwise match Qichacha locations to Zhongshu companies in three passes
     *       (credit code, then registration number, then company name), each pass
     *       anti-joined against the previous ones; write matches with isadd='0'.</li>
     *   <li>Zhongshu rows matched by none of the passes are written as new rows
     *       with isadd='1'.</li>
     *   <li>Qichacha rows whose credit code appears in the Zhongshu delete table
     *       are written with isadd='-1'.</li>
     * </ol>
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_company_location").enableHiveSupport().getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");

        // Guard: only run the mapping when the Zhongshu incremental table has rows;
        // otherwise fall through to the else branch and truncate the target.
        Dataset<Row> zs_data = spark.sql("select * from a_dc_ep_ods.zs_t_ent_basic");
        long zs_count = zs_data.count();
        if(zs_count>0){

            // Join the company lat/lng table with the main company table to obtain the
            // unified social credit code (credit_code) and registration number (no);
            // keep only rows that carry at least one non-empty identifier.
            spark.sql("SELECT a.*,\n" +
                    "       b.credit_code,\n" +
                    "       b.no " +
                    "FROM a_dc_ep_dwi.t_company_location a\n" +
                    "INNER JOIN a_dc_ep_ods.t_eci_company b ON a.company_id = b.company_id " +
                    "where (b.credit_code is not null and b.credit_code !='') or (b.no is not null and b.no !='')")
                    .createOrReplaceTempView("tmp_sum_company");
            // Deletions are handled further below by joining the Qichacha lat/lng view
            // against the Zhongshu delete table (a_dc_ep_ods.zs_t_ent_basic_del).

            // Updated rows from the Zhongshu incremental table (inner join),
            // pass 1: match on the unified social credit code (case/whitespace-insensitive).
            // NOTE(review): b.JOBID is selected twice — it appears to fill both the
            // update_date and dates columns of the target table; confirm this is intended.
            spark.sql("SELECT " +
                    "b.entid," +
                    "       a.key_no,\n" +
                    "       a.company_id,\n" +
                    "       b.entname,\n" +
                    "       b.dom," +
                    "       b.REGORGPROVINCE," +
                    "       b.REGORGCITY," +
                    "       a.level," +
                    "       b.LON," +
                    "       b.LAT," +
                    "       a.create_date,\n" +
                    "       b.JOBID,\n" +
                    "       b.JOBID,\n" +
                    "       \"0\" as isadd " +
                    "FROM\n" +
                    "  (SELECT *\n" +
                    "   FROM a_dc_ep_ods.zs_t_ent_basic\n" +
                    "   WHERE creditcode IS NOT NULL\n" +
                    "   AND creditcode !='') b\n" +
                    "INNER JOIN\n" +
                    "   tmp_sum_company a ON upper(trim(a.credit_code)) = upper(trim(b.creditcode))").createOrReplaceTempView("t_company1");

            // Pass 2: for rows not matched in pass 1 (anti-join on entid), match on the
            // registration number with dashes stripped from both sides.
            spark.sql("SELECT " +
                    "b.entid," +
                    "       a.key_no,\n" +
                    "       a.company_id,\n" +
                    "       b.entname,\n" +
                    "       b.dom," +
                    "       b.REGORGPROVINCE," +
                    "       b.REGORGCITY," +
                    "       a.level," +
                    "       b.LON," +
                    "       b.LAT," +
                    "       a.create_date,\n" +
                    "       b.JOBID,\n" +
                    "       b.JOBID,\n" +
                    "       \"0\" as isadd " +
                    "FROM\n" +
                    "  (SELECT u.*\n" +
                    "   FROM a_dc_ep_ods.zs_t_ent_basic u\n" +
                    " left join t_company1 uu on u.entid = uu.entid " +
                    "   WHERE uu.entid is null and u.regno is not null and u.regno !='') b\n" +
                    "INNER JOIN\n" +
                    "  tmp_sum_company a ON regexp_replace(trim(a.no),'-','') = regexp_replace(trim(b.regno),'-','')").createOrReplaceTempView("t_company2");

            // Pass 3: for rows matched in neither pass 1 nor pass 2, match on company name.
            // BUGFIX: the anti-join against t_company2 previously used
            //   "on uu.entid = uuu.entid and u.entid = uuu.entid";
            // since WHERE requires uu.entid IS NULL, the condition "uu.entid = uuu.entid"
            // could never be true (NULL = x is never true in SQL), so pass-2 matches were
            // never excluded here. Join t_company2 on u.entid directly, mirroring pass 2.
            spark.sql("SELECT " +
                    "b.entid," +
                    "       a.key_no,\n" +
                    "       a.company_id,\n" +
                    "       b.entname,\n" +
                    "       b.dom," +
                    "       b.REGORGPROVINCE," +
                    "       b.REGORGCITY," +
                    "       a.level," +
                    "       b.LON," +
                    "       b.LAT," +
                    "       a.create_date,\n" +
                    "       b.JOBID,\n" +
                    "       b.JOBID,\n" +
                    "       \"0\" as isadd " +
                    "FROM\n" +
                    "  (SELECT u.*\n" +
                    "   FROM a_dc_ep_ods.zs_t_ent_basic u " +
                    "left join t_company1 uu on u.entid = uu.entid " +
                    "left join t_company2 uuu on u.entid = uuu.entid " +
                    "   where uu.entid is null and uuu.entid is null ) b\n" +
                    " INNER JOIN\n" +
                    "  tmp_sum_company a ON trim(a.company_name) = trim(b.ENTNAME)").createOrReplaceTempView("t_company3");

            // Union the three passes (distinct dedupes exact duplicates) and overwrite
            // the incremental target with the matched (updated) rows, isadd='0'.
            spark.sql("select distinct a.* from (select * from t_company1 union all select * from t_company2 union all select * from t_company3) a").createOrReplaceTempView("tmp_t_eci_company");
            spark.sql("insert overwrite table a_dc_ep_incr.t_company_location select " +
                    "       key_no,\n" +
                    "       company_id,\n" +
                    "       entname,\n" +
                    "       dom," +
                    "       REGORGPROVINCE," +
                    "       REGORGCITY," +
                    "       level," +
                    "       LON," +
                    "       LAT," +
                    "       create_date,\n" +
                    "       JOBID,\n" +
                    "       JOBID,\n" +
                    "       isadd " +
                    " from tmp_t_eci_company");

            // Newly added rows from the Zhongshu incremental table (left anti-join on
            // entid against all matched rows); appended with isadd='1'. The a.* columns
            // are NULL for these rows by construction of the left join.
            Dataset<Row> insert_data = spark.sql("SELECT " +
                    "       a.key_no,\n" +
                    "       a.company_id,\n" +
                    "       b.entname,\n" +
                    "       b.dom," +
                    "       b.REGORGPROVINCE," +
                    "       b.REGORGCITY," +
                    "       a.level," +
                    "       b.LON," +
                    "       b.LAT," +
                    "       a.create_date,\n" +
                    "       b.JOBID,\n" +
                    "       b.JOBID,\n" +
                    "       '1' as isadd " +
                    "FROM\n" +
                    "  a_dc_ep_ods.zs_t_ent_basic b\n" +
                    "LEFT JOIN\n" +
                    "  tmp_t_eci_company a ON b.entid = a.entid\n" +
                    "WHERE a.entid IS NULL");
            insert_data.createOrReplaceTempView("tmp_t_eci_company1");
            spark.sql("insert into table a_dc_ep_incr.t_company_location select " +
                    "key_no,\n" +
                    "company_id,\n" +
                    "entname,\n" +
                    "dom," +
                    "REGORGPROVINCE," +
                    "REGORGCITY," +
                    "level," +
                    "LON," +
                    "LAT," +
                    "create_date,\n" +
                    "JOBID,\n" +
                    "JOBID,\n" +
                    "isadd " +
                    "from tmp_t_eci_company1");

            // Deleted rows: Qichacha rows whose credit code appears in the Zhongshu
            // delete table are appended with isadd='-1'.
            // NOTE(review): deletes are matched only on creditcode — rows deleted under
            // a regno- or name-based match would be missed; confirm this is acceptable.
            spark.sql("select qcc.* from (select del.* from a_dc_ep_ods.zs_t_ent_basic_del del where del.creditcode is not null and del.creditcode !='') zs inner join tmp_sum_company qcc on upper(trim(qcc.credit_code)) = upper(trim(zs.creditcode))").createOrReplaceTempView("delete_tmp");
            spark.sql("insert into table a_dc_ep_incr.t_company_location select " +
                    "qcc.key_no," +
                    "qcc.company_id," +
                    "qcc.company_name," +
                    "qcc.address," +
                    "qcc.province," +
                    "qcc.city," +
                    "qcc.level," +
                    "qcc.lng," +
                    "qcc.lat," +
                    "qcc.create_date," +
                    "qcc.update_date," +
                    "qcc.dates," +
                    "'-1' as isadd from delete_tmp qcc");
        }else {
            // Empty source: clear the incremental target so stale rows do not linger.
            spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_company_location");
            System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
        }
        spark.stop();
    }
}
