package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * Spark batch job that rebuilds the court-announcement ("t_court") incremental table
 * {@code a_dc_ep_incr.zs_t_court} from the latest ODS snapshot.
 *
 * <p>Flow: if the ODS source table has rows, (1) overwrite the incremental table with
 * "delete marker" rows (isadd = -1) for companies whose data changed, then (2) append the
 * new/updated announcement rows (isadd = 0). If the source is empty, the incremental
 * table is truncated instead.
 */
public class TCourtZs {

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_court")
                .enableHiveSupport()
                .getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");

            // Count the current ODS batch; an empty batch means "nothing to merge this cycle".
            Dataset<Row> sourceRows = spark.sql("select * from a_dc_ep_ods.zs_t_court_announcement");
            if (sourceRows.count() > 0) {
                writeDeleteRecords(spark);
                appendUpsertRecords(spark);
            } else {
                // No incoming data: clear the incremental table so downstream sees an empty delta.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.zs_t_court");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Always release the session (executors / YARN resources), even if a query fails.
            spark.stop();
        }
    }

    /**
     * Overwrites the incremental table with "delete" rows (isadd = -1).
     *
     * <p>Deletion logic: when any record of a company changes upstream, all existing rows for
     * that company in this table must first be removed. Companies to delete are found by joining
     * the DWI table to the company master on company_id, then to the ODS deletion table on a
     * normalized (upper-cased, trimmed) credit code.
     */
    private static void writeDeleteRecords(SparkSession spark) {
        Dataset<Row> deleteRows = spark.sql("select a.id,a.key_no,a.company_id,a.company_name,a.court_id,a.party,a.category,a.submit_date,a.publish_date,a.publish_page,\n" +
                "a.court,'' as casecode,a.content,a.province,a.is_valid,a.court_year,a.name_key_no_collection,a.prosecutor_list,a.defendant_list, \n" +
                "c.jobid,'-1' as isadd \n" +
                "from (select * from a_dc_ep_dwi.zs_t_court where company_id is not null and company_id !='') a \n" +
                "inner join (select * from a_dc_ep_ods.t_eci_company where credit_code is not null and credit_code !='') b \n" +
                "on a.company_id=b.company_id \n" +
                "inner join (select * from a_dc_ep_ods.zs_t_court_announcement_del where creditcode is not null and creditcode !='') c \n" +
                "on upper(trim(b.credit_code)) = upper(trim(c.creditcode)) \n");
        deleteRows.createOrReplaceTempView("tmp_t_court");
        spark.sql("insert overwrite table a_dc_ep_incr.zs_t_court select * from tmp_t_court");
    }

    /**
     * Appends the new/updated announcement rows (isadd = 0) to the incremental table.
     *
     * <p>Joins the ODS announcements to the company master on normalized credit code, and
     * left-joins per-docid aggregated plaintiff ("原告") and defendant ("被告") name lists.
     * NOTE(review): a.PUBLISH_DATE is projected twice — it appears to fill both the submit_date
     * and publish_date target columns; confirm against the target table schema.
     */
    private static void appendUpsertRecords(SparkSession spark) {
        Dataset<Row> upsertRows = spark.sql("SELECT " +
                "       a.RECORD_ID,\n" +
                "       b.key_no,\n" +
                "       b.company_id,\n" +
                "       b.company_name,\n" +
                "       md5(a.DOCID),\n" +
                "       a.NAME,\n" +
                "       a.NOTICE_TYPE,\n" +
                "       a.PUBLISH_DATE,\n" +
                "       a.PUBLISH_DATE,\n" +
                "       a.PUBLISH_PAGE,\n" +
                "       a.COURT_NAME,\n" +
                "       a.CASECODE,\n" +
                "       a.CONTENT,\n" +
                "       b.province,\n" +
                "       '1',\n" +
                "       substr(a.PUBLISH_DATE,0,4),\n" +
                "       translate(a.case_title,'当事人：','') as name_key_no_collection,\n" +
                "       c.prosecutor_list,\n" +
                "       d.defendant_list,\n" +
                "       a.JOBID,\n" +
                "       \"0\" as isadd  " +
                "FROM \n" +
                "  (SELECT * FROM a_dc_ep_ods.zs_t_court_announcement WHERE CREDITCODE IS NOT NULL AND CREDITCODE !='' and DOCID IS NOT NULL AND DOCID !='') a \n" +
                "INNER JOIN \n" +
                "  (SELECT * FROM a_dc_ep_ods.t_eci_company WHERE credit_code IS NOT NULL AND credit_code!='') b \n" +
                "ON upper(trim(a.creditcode)) = upper(trim(b.credit_code)) \n" +
                "left JOIN \n" +
                "  (select docid, cast(collect_set(name) as String) as prosecutor_list from a_dc_ep_ods.zs_t_court_announcement where DOCID IS NOT NULL AND DOCID !='' and type_name=\"原告\" group by docid) c " +
                "ON a.DOCID = c.DOCID \n" +
                "left JOIN \n" +
                "  (select docid, cast(collect_set(name) as String) as defendant_list from a_dc_ep_ods.zs_t_court_announcement where DOCID IS NOT NULL AND DOCID !='' and type_name=\"被告\" group by docid) d " +
                "ON a.DOCID = d.DOCID  ");
        upsertRows.createOrReplaceTempView("tmp_t_court1");
        spark.sql("insert into table a_dc_ep_incr.zs_t_court select * from tmp_t_court1");
    }
}
