package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TZhixing
 * @Author: xyl
 * @Description: Maps data between the Qichacha t_zhixing table and the
 *               Zhongshu t_enforcement_per table, producing the incremental
 *               table a_dc_ep_incr.t_zhixing.
 * @Date: 2021/12/15 16:51
 */
public class TZhixing {

    /**
     * Entry point. Rebuilds the incremental table {@code a_dc_ep_incr.t_zhixing}
     * from the Zhongshu source table {@code a_dc_ep_ods.zs_t_enforcement_per}:
     * <ol>
     *   <li>deleted rows (matched by case number against the _del table, isadd = -1)</li>
     *   <li>updated rows (matched by case number + credit code, isadd = 0)</li>
     *   <li>newly added rows (not present in the update set, isadd = 1)</li>
     * </ol>
     * If the source table is empty for this batch, the target table is truncated instead.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_zhixing")
                .enableHiveSupport()
                .getOrCreate();
        // Ensure the session is stopped even when a SQL statement fails;
        // previously an exception would leak the SparkSession.
        try {
            spark.sparkContext().setLogLevel("ERROR");
            Dataset<Row> zsData = spark.sql("select * from a_dc_ep_ods.zs_t_enforcement_per");
            if (zsData.count() > 0) {
                syncDeletes(spark);
                syncUpdates(spark);
                // NOTE: must run after syncUpdates — it anti-joins against the
                // temp view tmp_t_zhixing1 created there.
                syncInserts(spark);
            } else {
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_zhixing");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            spark.stop();
        }
    }

    /**
     * Writes rows that Zhongshu marked as deleted (isadd = '-1').
     * Deletion is matched by case number (an_no / CASECODE), with full-width
     * parentheses normalized to half-width before comparing.
     * Overwrites the target table, so this must run first.
     */
    private static void syncDeletes(SparkSession spark) {
        Dataset<Row> deleteData = spark.sql("select \n" +
                "   a.id,a.key_no,a.company_id,a.company_name,a.zx_id,a.name,a.lian_date,a.an_no,a.execute_gov,a.biaodi,a.status,a.partycard_num,a.is_valid,a.name_and_key_no,a.org_type,'' as EXITDATE,b.jobid,'-1' as isadd  \n" +
                "from (select * from a_dc_ep_dwi.t_zhixing where an_no is not null and an_no !='')a  \n" +
                "inner join a_dc_ep_ods.zs_t_enforcement_per_del b  \n" +
                "on upper(trim(translate(translate(a.an_no,\"（\",\"(\"),\"）\",\")\"))) = upper(trim(translate(translate(b.CASECODE,\"（\",\"(\"),\"）\",\")\"))) \n" +
                "where b.CASECODE is not null and b.CASECODE !=''  \n");
        deleteData.createOrReplaceTempView("tmp_t_zhixing");
        spark.sql("insert overwrite table a_dc_ep_incr.t_zhixing select * from tmp_t_zhixing");
    }

    /**
     * Appends rows that already exist in the warehouse (isadd = '0'),
     * matched by case number and confirmed via credit code against
     * t_eci_company. Leaves the matched set behind as temp view
     * tmp_t_zhixing1 for {@link #syncInserts(SparkSession)} to anti-join.
     */
    private static void syncUpdates(SparkSession spark) {
        Dataset<Row> updateData = spark.sql("SELECT " +
                "       md5(concat_ws('',b.company_id,translate(translate(a.CASECODE,\"(\",\"（\"),\")\",\"）\"))),\n" +
                "       b.key_no,\n" +
                "       b.company_id,\n" +
                "       b.company_name,\n" +
                "       a.RECORD_ID as RECORD_ID,\n" +
                "       a.INAMECLEAN,\n" +
                "       a.REGDATECLEAN,\n" +
                "       a.CASECODE,\n" +
                "       a.COURTNAME,\n" +
                "       a.EXECMONEY,\n" +
                "       a.CASESTATE,\n" +
                "       a.CARDNUMCLEAN,\n" +
                // Derive validity from EXITDATE: past 10-char date -> 0,
                // malformed date -> '', otherwise -> 1.
                "       if(substr(cast(current_timestamp() as string),1,10)>a.EXITDATE and length(a.EXITDATE) == 10,0,if(length(a.EXITDATE) != 10,'',1)),\n" +
                "       b.name_and_key_no,\n" +
                "       if(trim(a.type)=\"自然人\",'1','2'),\n" +
                " a.EXITDATE, " +
                "       a.JOBID,\n" +
                "       \"0\" as isadd  " +
                "FROM \n" +
                "  (SELECT * FROM a_dc_ep_ods.zs_t_enforcement_per WHERE CASECODE IS NOT NULL AND CASECODE !='') a \n" +
                "INNER JOIN \n" +
                "  (SELECT * FROM a_dc_ep_dwi.t_zhixing WHERE an_no IS NOT NULL AND an_no !='') b \n" +
                "on upper(trim(translate(translate(a.CASECODE,\"（\",\"(\"),\"）\",\")\"))) = upper(trim(translate(translate(b.an_no,\"（\",\"(\"),\"）\",\")\")))\n" +
                "inner join \n" +
                "  (SELECT * FROM a_dc_ep_ods.t_eci_company WHERE credit_code IS NOT NULL AND credit_code!='') c \n" +
                "ON (upper(trim(a.CREDITCODE)) = upper(trim(c.credit_code)) and b.company_id=c.company_id )\n" );
        updateData.createOrReplaceTempView("tmp_t_zhixing1");
        spark.sql("insert into table a_dc_ep_incr.t_zhixing select * from tmp_t_zhixing1");
    }

    /**
     * Appends brand-new rows (isadd = '1'): source records matched to a
     * company by credit code but absent (by RECORD_ID) from the update set
     * computed in {@link #syncUpdates(SparkSession)} (temp view tmp_t_zhixing1).
     */
    private static void syncInserts(SparkSession spark) {
        Dataset<Row> insertData = spark.sql("SELECT " +
                "       md5(concat_ws('',b.company_id,translate(translate(a.CASECODE,\"(\",\"（\"),\")\",\"）\"))),\n" +
                "       b.key_no,\n" +
                "       b.company_id,\n" +
                "       b.company_name,\n" +
                "       a.RECORD_ID as RECORD_ID,\n" +
                "       a.INAMECLEAN,\n" +
                "       a.REGDATECLEAN,\n" +
                "       a.CASECODE,\n" +
                "       a.COURTNAME,\n" +
                "       a.EXECMONEY,\n" +
                "       a.CASESTATE,\n" +
                "       a.CARDNUMCLEAN,\n" +
                "       if(substr(cast(current_timestamp() as string),1,10)>a.EXITDATE and length(a.EXITDATE) == 10,0,if(length(a.EXITDATE) != 10,'',1)),\n" +
                "       \"\",\n" +
                "       if(trim(a.type)=\"自然人\",'1','2'),\n" +
                " a.EXITDATE, " +
                "       a.JOBID,\n" +
                "       \"1\" as isadd \n" +
                "FROM\n" +
                " (SELECT * FROM a_dc_ep_ods.zs_t_enforcement_per WHERE CREDITCODE IS NOT NULL AND CREDITCODE !='' AND CASECODE IS NOT NULL AND CASECODE !='') a\n" +
                "inner join \n" +
                "  (SELECT * FROM a_dc_ep_ods.t_eci_company WHERE credit_code IS NOT NULL AND credit_code!='') b \n" +
                "ON upper(trim(a.CREDITCODE)) = upper(trim(b.credit_code))\n" +
                // Left anti-join pattern: keep only rows with no match in the
                // update set produced by syncUpdates.
                "left JOIN \n" +
                " (SELECT * FROM tmp_t_zhixing1 WHERE RECORD_ID IS NOT NULL AND RECORD_ID!='') c \n" +
                "ON a.RECORD_ID = c.RECORD_ID  \n" +
                "where c.RECORD_ID is null \n");
        insertData.createOrReplaceTempView("tmp_t_zhixing2");
        spark.sql("insert into table a_dc_ep_incr.t_zhixing select * from tmp_t_zhixing2");
    }
}
