package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;


/**
 * @Class: TCourtnotice
 * @Author: xyl
 * @Description: Field-mapping job between the QCC t_courtnotice table and the
 *               ZhongShu (中数) t_opennotice table; writes delta rows into
 *               a_dc_ep_incr.t_courtnotice.
 * @Date: 2021/12/24 15:30
 */
public class TCourtnotice {

    /**
     * Spark ETL driver: reconciles the QCC table a_dc_ep_dwi.t_courtnotice
     * (court hearing announcements, 开庭公告) against the ZhongShu (ZS) ODS
     * tables and writes the resulting delta rows into a_dc_ep_incr.t_courtnotice.
     *
     * Flow:
     *   1. If the ZS base table has any rows this cycle:
     *      a. overwrite the incremental table with "deleted" rows (isadd = '-1'),
     *         matched by normalized case number against the ZS delete table;
     *      b. append "updated" rows (isadd = '0') built from the ZS party/base
     *         tables joined to the company dimension on credit code.
     *   2. Otherwise truncate the incremental table so no stale delta survives.
     *
     * NOTE(review): an earlier variant (since removed) also joined
     * a_dc_ep_dwi.t_courtnotice by case number to split updates from inserts,
     * but one case number can involve several companies and multiple hearings,
     * so that join fans out and multiplies rows. The current code therefore
     * emits every matched ZS row as an update and has no separate insert branch.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_courtnotice")
                .enableHiveSupport()
                .getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");
            // Did ZS deliver any base rows this cycle? count() forces the scan once.
            Dataset<Row> zsData = spark.sql("select * from a_dc_ep_ods.zs_t_opennotice_base");
            long zsCount = zsData.count();
            if (zsCount > 0) {
                overwriteDeletedRecords(spark);
                appendUpdatedRecords(spark);
            } else {
                // No ZS data touches this table in the current cycle: clear the
                // incremental table so downstream consumers see an empty delta.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_courtnotice");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Always release the session, even when a SQL statement throws.
            spark.stop();
        }
    }

    /**
     * Overwrites a_dc_ep_incr.t_courtnotice with rows flagged for deletion
     * (isadd = '-1'). ZS deletion semantics for this table key on the case
     * number, so rows are matched on case_no vs. CASECODE after replacing
     * full-width parentheses with ASCII ones, trimming, and upper-casing
     * both sides.
     *
     * @param spark active Hive-enabled session
     */
    private static void overwriteDeletedRecords(SparkSession spark) {
        Dataset<Row> deleteData = spark.sql("select a.id,a.key_no,a.company_id,a.company_name,a.court_notice_id,a.lian_date,a.case_reason,a.prosecutor_list,a.defendant_list,a.execute_gov,\n" +
                "a.case_no,a.province,a.is_valid,a.prosecutor,a.defendant,a.execute_unite,a.schedule_time,a.undertake_department,a.chief_judge,\n" +
                "b.jobid, '-1' as isadd \n" +
                "from (select * from a_dc_ep_dwi.t_courtnotice where case_no is not null and case_no !='')a  " +
                "inner join (select * from a_dc_ep_ods.zs_t_opennotice_base_del where CASECODE is not null and CASECODE !='') b  " +
                "on upper(trim(translate(translate(a.case_no,\"（\",\"(\"),\"）\",\")\"))) = upper(trim(translate(translate(b.CASECODE,\"（\",\"(\"),\"）\",\")\"))) ");
        deleteData.createOrReplaceTempView("tmp_t_courtnotice");
        spark.sql("insert overwrite table a_dc_ep_incr.t_courtnotice select * from tmp_t_courtnotice");
    }

    /**
     * Appends update rows (isadd = '0') to a_dc_ep_incr.t_courtnotice.
     * Rows come from the ZS party table joined to the ZS base table on docid,
     * then to the company dimension (t_eci_company) on credit code; the
     * is_valid column is hard-coded to '1' for every emitted row.
     *
     * @param spark active Hive-enabled session
     */
    private static void appendUpdatedRecords(SparkSession spark) {
        Dataset<Row> updateData = spark.sql("SELECT " +
                "       a.RECORD_ID,\n" +
                "       c.key_no,\n" +
                "       c.company_id,\n" +
                "       c.company_name,\n" +
                "       b.docid,\n" +
                "       concat_ws(\" \",b.courted_date,b.open_time),\n" +
                "       b.case_reason,\n" +
                "       b.PLAINTIFF,\n" +
                "       b.DEFENDANT,\n" +
                "       b.court_name,\n" +
                "       b.casecode,\n" +
                "       b.provinces,\n" +
                "       '1',\n" +
                "       b.PLAINTIFF,\n" +
                "       b.DEFENDANT,\n" +
                "       b.court_room,\n" +
                "       b.PUNISH_DATE,\n" +
                "       b.undertaker,\n" +
                "       b.JUDGE,\n" +
                "       b.JOBID,\n" +
                "       \"0\" as isadd  " +
                "FROM " +
                "  (SELECT * FROM a_dc_ep_ods.zs_t_opennotice_party WHERE CREDITCODE IS NOT NULL AND CREDITCODE !='' and docid IS NOT NULL AND docid !='') a " +
                "INNER JOIN " +
                "  (SELECT * FROM a_dc_ep_ods.zs_t_opennotice_base WHERE docid IS NOT NULL AND docid !='') b " +
                "on a.docid=b.docid "+
                "INNER JOIN " +
                "  (SELECT * FROM a_dc_ep_ods.t_eci_company WHERE credit_code IS NOT NULL AND credit_code !='') c " +
                "on a.CREDITCODE=c.credit_code ");
        updateData.createOrReplaceTempView("t_courtnotice1");
        spark.sql("insert into table a_dc_ep_incr.t_courtnotice select * from t_courtnotice1");
    }
}
