package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TJudgement
 * @Author: xyl
 * @Description: Maps judgement data between the Qichacha t_judgement table and the
 *               Zhongshu t_getexecutejudgmentinfo table, writing the merged result
 *               into a_dc_ep_incr.t_judgement.
 * @Date: 2021/12/16 17:17
 */
public class TJudgement {

    /**
     * Entry point for the t_judgement mapping job.
     *
     * <p>Reads the current Zhongshu batch from
     * {@code a_dc_ep_ods.zs_t_getexecutejudgmentinfo_base}. When the batch is
     * non-empty, it first overwrites {@code a_dc_ep_incr.t_judgement} with the
     * records marked for deletion (matched by case number) and then appends the
     * updated/new records. When the batch is empty, the incremental table is
     * truncated instead.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_judgement")
                .enableHiveSupport()
                .getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");
            Dataset<Row> zsData = spark.sql("select * from a_dc_ep_ods.zs_t_getexecutejudgmentinfo_base");
            if (zsData.count() > 0) {
                writeDeletedRecords(spark);
                writeUpdatedRecords(spark);
            } else {
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_judgement");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Release the Spark session even if one of the queries fails,
            // so the application does not leak cluster resources.
            spark.stop();
        }
    }

    /**
     * Overwrites a_dc_ep_incr.t_judgement with the records deleted in this
     * Zhongshu batch (isadd = '-1').
     *
     * <p>Deletions in this table are keyed by case number: existing DWI rows
     * are joined to the Zhongshu delete table by CASECODE after normalizing
     * full-width parentheses, trimming, and upper-casing both sides.
     *
     * @param spark active Spark session with Hive support
     */
    private static void writeDeletedRecords(SparkSession spark) {
        // t_judgement: judgement documents. Matching on the cleaned case number
        // because the delete feed only carries CASECODE, not the row id.
        Dataset<Row> deleteData = spark.sql("select \n" +
                "a.id," +
                "a.key_no," +
                "a.company_id," +
                "a.company_name," +
                "a.case_id," +
                "a.court," +
                "a.case_name," +
                "a.case_no," +
                "a.case_type,\n" +
                "a.case_role," +
                "a.case_reason," +
                "a.case_reason_type," +
                "a.court_year," +
                "a.court_month," +
                "a.court_level," +
                "a.submit_date," +
                "a.province," +
                "a.is_valid, \n" +
                "a.defendant," +
                "a.prosecutor," +
                "a.update_date," +
                "a.amount," +
                "b.jobid," +
                "'-1' as isadd  \n" +
                "from (select * from a_dc_ep_dwi.t_judgement where case_no is not null and case_no !='')a  " +
                "inner join (select * from a_dc_ep_ods.zs_t_getexecutejudgmentinfo_base_del where CASECODE is not null and CASECODE !='') b  " +
                "on upper(trim(translate(translate(a.case_no,\"（\",\"\"),\"）\",\"\"))) = upper(trim(translate(translate(b.CASECODE,\"（\",\"\"),\"）\",\"\"))) ");
        deleteData.createOrReplaceTempView("tmp_t_judgement");
        // Overwrite: the deleted set replaces the previous contents of the
        // incremental table; updated rows are appended afterwards.
        spark.sql("insert overwrite table a_dc_ep_incr.t_judgement select * from tmp_t_judgement");
    }

    /**
     * Appends the updated/new judgement records of this batch to
     * a_dc_ep_incr.t_judgement (isadd = '0').
     *
     * <p>Builds two temp views that aggregate party names per document
     * (plaintiffs '原告' and defendants '被告'), then joins the Zhongshu base
     * and party tables with the company master (by unified credit code) and
     * with court notices (by normalized case number, newest filing date per
     * case) to derive court year/month and court level.
     *
     * @param spark active Spark session with Hive support
     */
    private static void writeUpdatedRecords(SparkSession spark) {
        // Plaintiff names per docid, comma-joined.
        Dataset<Row> plaintiffData = spark.sql("select \n" +
                "  t.docid,\n" +
                "  concat_ws(',',collect_list(t.name)) as  yg \n" +
                " FROM (select * from a_dc_ep_ods.zs_t_getexecutejudgmentinfo_party where type_name='原告')t group by t.docid");
        plaintiffData.createOrReplaceTempView("linshi_data_temp1");

        // Defendant names per docid, comma-joined.
        Dataset<Row> defendantData = spark.sql("select \n" +
                "  t.docid,\n" +
                "  concat_ws(',',collect_list(t.name)) as  bg \n" +
                " FROM (select * from a_dc_ep_ods.zs_t_getexecutejudgmentinfo_party where type_name='被告')t group by t.docid");
        defendantData.createOrReplaceTempView("linshi_data_temp2");

        Dataset<Row> updateData = spark.sql("SELECT " +
                "		a.RECORD_ID," +
                "		c.key_no," +
                "		c.company_id," +
                "		c.company_name," +
                "       a.DOCID,\n" +
                "       b.COURT_NAME,\n" +
                "       b.CASE_TITLE,\n" +
                "       b.CASECODE,\n" +
                "       b.CASE_TYPE,\n" +
                "       a.TYPE_NAME,\n" +
                "       b.CASE_REASON,\n" +
                "       b.CASE_REASON_CODE,\n" +
                // court_year / court_month derived from the matched court
                // notice's filing date (yyyy / yyyyMM), empty when no match.
                "       if(t2.lian_date is not null,substr(translate(cast(t2.lian_date as string),'-',''),1,4),''),\n" +
                "       if(t2.lian_date is not null,substr(translate(cast(t2.lian_date as string),'-',''),1,6),''),\n" +
                // court_level inferred from the court-name suffix; blank for
                // basic-level or unrecognized courts.
                "       case when instr(b.COURT_NAME,'中级人民法院')>0 then '3' \n" +
                "when instr(b.COURT_NAME,'高级人民法院')>0 then '4' \n" +
                "when instr(b.COURT_NAME,'最高人民法院')>0 then '5' \n" +
                "else '' end,\n" +
                "       b.PUBLISH_DATE,\n" +
                "       b.AREA,\n" +
                "       \"1\",\n" +
                "       e.bg,\n" +
                "       d.yg,\n" +
                "       b.JOBID,\n" +
                "       \"\",\n" +
                "       b.JOBID,\n" +
                "       \"0\" as isadd  " +
                "FROM " +
                "  (SELECT * FROM a_dc_ep_ods.zs_t_getexecutejudgmentinfo_party WHERE CREDITCODE IS NOT NULL AND CREDITCODE !='' and docid IS NOT NULL AND docid !='' and length(jobid)<10) a " +
                "INNER JOIN " +
                "  (SELECT * FROM a_dc_ep_ods.zs_t_getexecutejudgmentinfo_base WHERE docid IS NOT NULL AND docid !='' and casecode IS NOT NULL AND casecode !='' and length(jobid)<10) b " +
                "on a.docid=b.docid "+
                "INNER JOIN " +
                "  (SELECT * FROM a_dc_ep_ods.t_eci_company WHERE credit_code IS NOT NULL AND credit_code !='') c " +
                "on a.CREDITCODE=c.credit_code " +
                "left JOIN linshi_data_temp1 d on a.docid=d.docid " +
                "left JOIN linshi_data_temp2 e on a.docid=e.docid " +
                // Deduplicate court notices: keep only the newest filing date
                // per normalized case number before joining on CASECODE.
                "left join (SELECT * FROM (select b1.*,ROW_NUMBER() OVER(PARTITION BY upper(trim(translate(translate(b1.case_no,\"（\",\"(\"),\"）\",\")\"))) ORDER BY b1.lian_date desc) num from a_dc_ep_dwi.t_courtnotice b1 where b1.case_no is not null and b1.case_no!='') b1 where b1.num=1)t2 on upper(trim(translate(translate(b.CASECODE,\"（\",\"(\"),\"）\",\")\"))) = upper(trim(translate(translate(t2.case_no,\"（\",\"(\"),\"）\",\")\")))" );

        updateData.createOrReplaceTempView("tmp_t_judgement1");
        // Append after the delete pass so both record sets land in the table.
        spark.sql("insert into table a_dc_ep_incr.t_judgement select * from tmp_t_judgement1");
    }
}
