package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TLimitHighconsume
 * @Author: xyl
 * @Description: Spark job that rebuilds the incremental table
 *               a_dc_ep_incr.t_limit_highconsume from the ODS snapshot
 *               tables zs_t_limit_high_spending_lists (additions) and
 *               zs_t_limit_high_spending_lists_del (deletions).
 * @Date: 2024/01/10 10:30
 */
public class TLimitHighconsume {
    /**
     * Entry point. Rebuilds {@code a_dc_ep_incr.t_limit_highconsume} from the
     * current ODS batch:
     * <ul>
     *   <li>If the batch table {@code zs_t_limit_high_spending_lists} has rows,
     *       overwrite the incremental table with the joined/normalized rows
     *       (isadd flag {@code '0'}), then append rows matching the deletion
     *       batch table with flag {@code '-1'} — presumably consumed by a
     *       downstream merge step (TODO confirm).</li>
     *   <li>If the batch is empty, truncate the incremental table.</li>
     * </ul>
     * The Spark session is stopped in a {@code finally} block so it is
     * released even when one of the SQL statements throws.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_limit_highconsume").enableHiveSupport().getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");
            // Count the current ODS batch to decide between rebuild and truncate.
            Dataset<Row> zs_data = spark.sql("select * from a_dc_ep_ods.zs_t_limit_high_spending_lists");
            long zs_count = zs_data.count();
            if (zs_count > 0) {
                // Upsert: overwrite the target with the batch joined to company
                // reference data. Case numbers have half-width parentheses
                // normalized to full-width; company-name joins normalize in the
                // opposite direction. The t2/t3 subqueries keep one row per
                // normalized company name (longest status, latest date).
                spark.sql("insert overwrite table a_dc_ep_incr.t_limit_highconsume select " +
                        "if(t2.company_name is null or length(t1.person_subjected_to_execution)<4,'1','2')," +
                        "t1.person_subjected_to_execution," +
                        "qcc.company_name," +
                        "qcc.company_id," +
                        // NOTE(review): company_id projected twice — appears to fill both
                        // related-party code and related-company id columns; confirm intent.
                        "qcc.company_id," +
                        "''," +
                        "t1.date_case_filing," +
                        "translate(translate(t1.case_no,\"(\",\"（\"),\")\",\"）\")," +
                        "t1.court," +
                        "''," +
                        "t1.execution_applicant," +
                        "t3.key_no," +
                        "t3.company_id," +
                        "''," +
                        "t1.pubdate," +
                        "''," +
                        "''," +
                        "t1.jobid," +
                        "t1.jobid," +
                        "'0' as isadd " +
                        "from a_dc_ep_ods.zs_t_limit_high_spending_lists t1 " +
                        "inner join (select * from a_dc_ep_ods.t_eci_company where credit_code is not null and credit_code!='') qcc " +
                        "on upper(trim(t1.creditcode)) = upper(trim(qcc.credit_code)) " +
                        "left join (select b2.* from (select b1.*,ROW_NUMBER() OVER(PARTITION BY translate(translate(b1.company_name,\"（\",\"(\"),\"）\",\")\") ORDER BY length(b1.status) DESC,b1.dates DESC) num from xy_ods.qcc_t_eci_company b1)b2 where b2.num=1) t2 " +
                        "on translate(translate(t1.person_subjected_to_execution,\"（\",\"(\"),\"）\",\")\") = translate(translate(t2.company_name,\"（\",\"(\"),\"）\",\")\") " +
                        "left join (select b2.* from (select b1.*,ROW_NUMBER() OVER(PARTITION BY translate(translate(b1.company_name,\"（\",\"(\"),\"）\",\")\") ORDER BY length(b1.status) DESC,b1.dates DESC) num from xy_ods.qcc_t_eci_company b1)b2 where b2.num=1) t3 " +
                        "on translate(translate(t1.execution_applicant,\"（\",\"(\"),\"）\",\")\") = translate(translate(t3.company_name,\"（\",\"(\"),\"）\",\")\") ");

                // Deletions: append DWI rows whose case number matches the
                // deletion batch but is absent from the current addition batch,
                // marked with isadd='-1'.
                spark.sql("insert into table a_dc_ep_incr.t_limit_highconsume " +
                        "select t1.peopleenforcedtype,t1.peopleenforced,t1.relatedpartyname,t1.relatedpartycode,t1.relatedcompanyid,t1.gender,t1.filingtime,t1.casenumber,t1.courtname,t1.courtzip," +
                        "t1.executeapplyname,t1.executeapplycode,t1.executecompanyid,t1.subjectmatter,t1.infopubldate,t1.content,t1.linkaddress,t1.inserttime,t2.jobid,'-1' " +
                        "from (select * from a_dc_ep_dwi.t_limit_highconsume where casenumber is not null and casenumber!='')t1 " +
                        "inner join (select distinct case_no,jobid from a_dc_ep_ods.zs_t_limit_high_spending_lists_del )t2 " +
                        "on translate(translate(t1.casenumber,\"（\",\"(\"),\"）\",\")\") = translate(translate(t2.case_no,\"（\",\"(\"),\"）\",\")\")  " +
                        "left join (select distinct case_no from a_dc_ep_ods.zs_t_limit_high_spending_lists )t3 " +
                        "on translate(translate(t1.casenumber,\"（\",\"(\"),\"）\",\")\") = translate(translate(t3.case_no,\"（\",\"(\"),\"）\",\")\") " +
                        "where t3.case_no is null ");
            } else {
                // Empty batch: clear the incremental table instead of rebuilding it.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_limit_highconsume");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Release the session even if a query above threw.
            spark.stop();
        }
    }
}
