package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TRegisterDetail
 * @Author: xyl
 * @Description: Data mapping between the Qichacha (企查查) t_register_detail table
 *               and the ZS (中数) t_case_filing table.
 * @Date: 2021/12/15 16:51
 */
public class TRegisterDetail {

    /**
     * Entry point. Builds a Hive-enabled SparkSession and, when the current ZS
     * batch (a_dc_ep_ods.zs_t_case_filing_base) is non-empty, rebuilds the
     * incremental table a_dc_ep_incr.t_register_detail in three passes:
     * deleted rows (isadd = -1, written with overwrite), updated rows
     * (isadd = 0) and newly added rows (isadd = 1). When the batch is empty
     * the incremental table is truncated instead.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_register_detail")
                .enableHiveSupport()
                .getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");

            // Skip all processing when the current ZS batch has no rows.
            Dataset<Row> zs_data = spark.sql("select * from a_dc_ep_ods.zs_t_case_filing_base");
            if (zs_data.count() > 0) {
                // Order matters: the delete pass uses "insert overwrite" and so
                // must run first; the update/insert passes append to the result.
                writeDeletedRows(spark);
                registerPartyView(spark);
                writeUpdatedRows(spark);
                writeNewRows(spark);
            } else {
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_register_detail");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Always release the session, even if one of the SQL steps fails.
            spark.stop();
        }
    }

    /**
     * Pass 1 (isadd = '-1'): existing t_register_detail rows whose case number
     * (案号) also appears in the ZS delete table, matched after normalizing
     * full-width parentheses, trimming and upper-casing. Written with
     * "insert overwrite", which resets the incremental table for this batch.
     */
    private static void writeDeletedRows(SparkSession spark) {
        Dataset<Row> delete_data = spark.sql("select a.id,\n" +
                "a.reason,\n" +
                "a.case_no,\n" +
                "a.regist_date,\n" +
                "a.hold_date,\n" +
                "a.finish_date,\n" +
                "a.department,\n" +
                "a.court,\n" +
                "a.judger,\n" +
                "a.assistant,\n" +
                "a.case_type,\n" +
                "a.case_status,\n" +
                "a.prosecutor,\n" +
                "a.appellee,\n" +
                "a.outsider,\n" +
                "b.jobid,\n" +
                "'-1' as isadd \n" +
                "from (select * from a_dc_ep_dwi.t_register_detail where case_no is not null and case_no !='')a  " +
                "inner join (select * from a_dc_ep_ods.zs_t_case_filing_base_del where CASECODE is not null and CASECODE !='') b  " +
                "on upper(trim(translate(translate(a.case_no,\"（\",\"(\"),\"）\",\")\"))) = upper(trim(translate(translate(b.CASECODE,\"（\",\"(\"),\"）\",\")\"))) ");
        delete_data.createOrReplaceTempView("t_register_detail_temp");
        spark.sql("insert overwrite table a_dc_ep_incr.t_register_detail select * from t_register_detail_temp");
    }

    /**
     * Registers the temp view t_yuangao_beigao_temp: one row per docid with the
     * aggregated party name lists — pro (plaintiff side: 公诉人/原告/上诉人/申请人),
     * app (defendant side: 被告人/被告/被上诉人/被申请人) and outsider (第三人).
     * Names within a role are comma-joined; the '|' separators introduced by the
     * outer concat_ws are stripped again via translate.
     */
    private static void registerPartyView(SparkSession spark) {
        Dataset<Row> yuangao_beigao_data = spark.sql("select \n" +
                "  b.docid,\n" +
                "  translate(concat_ws('|',collect_list(if(b.type='原告',b.names,''))),\"|\",\"\") as pro,\n" +
                "  translate(concat_ws('|',collect_list(if(b.type='被告',b.names,''))),\"|\",\"\") as app,\n" +
                "  translate(concat_ws('|',collect_list(if(b.type='第三人',b.names,''))),\"|\",\"\") as outsider  \n" +
                "from \n" +
                "  (select \n" +
                "    a.docid,a.type type ,concat_ws(',',collect_list(a.name)) names \n" +
                "  from \n" +
                "    (SELECT \n" +
                "      docid,\n" +
                "      name,\n" +
                "      if(trim(TYPE_NAME) in('公诉人','原告','上诉人','申请人'),'原告',if(trim(TYPE_NAME) in('被告人','被告','被上诉人','被申请人'),'被告',if(TYPE_NAME like '%第三人%','第三人','其他'))) as type \n" +
                "    FROM a_dc_ep_ods.zs_t_case_filing_party)a \n" +
                "  group by a.docid,a.type)b \n" +
                "group by b.docid");
        yuangao_beigao_data.createOrReplaceTempView("t_yuangao_beigao_temp");
    }

    /**
     * Pass 2 (isadd = '0'): ZS rows whose normalized case number matches an
     * existing t_register_detail row (inner join), carrying the existing row's
     * id/assistant and the ZS batch's refreshed attributes plus the party lists
     * from t_yuangao_beigao_temp. Appended to the incremental table.
     * Requires {@link #registerPartyView(SparkSession)} to have run first.
     */
    private static void writeUpdatedRows(SparkSession spark) {
        Dataset<Row> update_data = spark.sql("SELECT\n" +
                "b.id,\n" +
                "a.CASE_REASON as reason,\n" +
                "a.CASECODE as case_no,\n" +
                "a.REGDATE as  regist_date,\n" +
                "a.TRIAL_DATE as hold_date,\n" +
                "a.SETTLEMENT_DATE as finish_date,\n" +
                "a.DEPARTMENT as department,\n" +
                "a.COURT_NAME as court,\n" +
                "a.UNDERTAKER as judger,\n" +
                "b.assistant,\n" +
                "a.CASE_TYPE as case_type,\n" +
                "a.CASE_PROGRESS as case_status,\n" +
                "e.pro,\n" +
                "e.app,\n" +
                "e.outsider,\n" +
                "a.jobid,\n" +
                "'0' as isadd \n" +
                "FROM " +
                "  (SELECT * FROM a_dc_ep_ods.zs_t_case_filing_base WHERE casecode IS NOT NULL AND casecode !='') a " +
                "left JOIN " +
                "  (SELECT * FROM t_yuangao_beigao_temp) e " +
                "on a.docid=e.docid " +
                "INNER JOIN " +
                "  (SELECT * FROM a_dc_ep_dwi.t_register_detail WHERE case_no IS NOT NULL AND case_no !='') b " +
                "on upper(trim(translate(translate(a.casecode,\"（\",\"(\"),\"）\",\")\"))) = upper(trim(translate(translate(b.case_no,\"（\",\"(\"),\"）\",\")\"))) " );
        update_data.createOrReplaceTempView("t_register_detail_one");
        spark.sql("insert into table a_dc_ep_incr.t_register_detail select * from t_register_detail_one");
    }

    /**
     * Pass 3 (isadd = '1'): ZS rows with no matching case number in
     * t_register_detail (anti-join via left join + "b.case_no is null"), using
     * the ZS docid as the id and '无' as the assistant placeholder. Appended to
     * the incremental table.
     * Requires {@link #registerPartyView(SparkSession)} to have run first.
     */
    private static void writeNewRows(SparkSession spark) {
        Dataset<Row> insert_data = spark.sql("SELECT\n" +
                "a.docid,\n" +
                "a.CASE_REASON as reason,\n" +
                "a.CASECODE as case_no,\n" +
                "a.REGDATE as  regist_date,\n" +
                "a.TRIAL_DATE as hold_date,\n" +
                "a.SETTLEMENT_DATE as finish_date,\n" +
                "a.DEPARTMENT as department,\n" +
                "a.COURT_NAME as court,\n" +
                "a.UNDERTAKER as judger,\n" +
                "'无' as assistant,\n" +
                "a.CASE_TYPE as case_type,\n" +
                "a.CASE_PROGRESS as case_status,\n" +
                "e.pro,\n" +
                "e.app,\n" +
                "e.outsider,\n" +
                "a.jobid,\n" +
                "'1' as isadd \n" +
                "FROM " +
                "  (SELECT * FROM a_dc_ep_ods.zs_t_case_filing_base WHERE casecode IS NOT NULL AND casecode !='') a " +
                "left JOIN " +
                "  (SELECT * FROM t_yuangao_beigao_temp) e " +
                "on a.docid=e.docid " +
                "left JOIN " +
                "  (SELECT * FROM a_dc_ep_dwi.t_register_detail WHERE case_no IS NOT NULL AND case_no !='') b " +
                "on upper(trim(translate(translate(a.casecode,\"（\",\"(\"),\"）\",\")\"))) = upper(trim(translate(translate(b.case_no,\"（\",\"(\"),\"）\",\")\"))) " +
                "where b.case_no is null ");
        insert_data.createOrReplaceTempView("t_register_detail_two");
        spark.sql("insert into table a_dc_ep_incr.t_register_detail select * from t_register_detail_two");
    }
}
