package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TAnnualInvestinfo
 * @Author: kongcb
 * @Description: Maps data between the QCC table t_annual_investinfo and the
 *               ZS (zhongshu) table t_an_investment, writing the result into
 *               the incremental table a_dc_ep_incr.t_annual_investinfo.
 * @Date: 2023/10/31 10:07
 */

public class TAnnualInvestinfo {

    /**
     * Spark batch entry point.
     *
     * <p>Flow: if the ZS source table (a_dc_ep_ods.zs_t_an_investment) has rows
     * this cycle, (1) build a de-duplicated temp view of annual basic info,
     * (2) overwrite the incremental target with the mapped/joined source rows,
     * (3) append rows flagged as deleted upstream (isadd = '-1'). Otherwise the
     * incremental target table is truncated.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_annual_investinfo").enableHiveSupport().getOrCreate();
        // Bug fix: spark.stop() previously ran only on the happy path; a failure
        // in any query below leaked the Spark session. The finally block below
        // guarantees the session is always released.
        try {
            spark.sparkContext().setLogLevel("ERROR");
            Dataset<Row> zs_data = spark.sql("select * from a_dc_ep_ods.zs_t_an_investment");
            // NOTE(review): count() scans the whole source table just to test
            // non-emptiness; on Spark 2.4+ !zs_data.isEmpty() would be cheaper.
            long zs_count = zs_data.count();
            if (zs_count > 0) {
                // De-duplicated union of the full (dwi) and incremental (incr)
                // basic-info tables, exposed as a temp view for the joins below.
                spark.sql("select distinct * from (" +
                        "select id,company_id,company_name from a_dc_ep_dwi.t_annual_basicinfo " +
                        "union all " +
                        "select id,company_id,company_name from a_dc_ep_incr.t_annual_basicinfo) as a").createOrReplaceTempView("annual_basicinfo");
                // Pick up the newly added rows from the ZS incremental source (left join);
                // rows with no matching partner record get isadd = '1'.
                spark.sql("insert overwrite table a_dc_ep_incr.t_annual_investinfo select \n" +
                        "a.record_id as id,\n" +
                        "a.ancheid as annual_report_id,\n" +
                        "b.company_id as key_no,\n" +
                        "b.company_id,\n" +
                        "b.company_name,\n" +
                        "'' as `no`,\n" +
                        "a.entname as name,\n" +
                        "if(a.CREDITNO is not null and a.CREDITNO!='',a.CREDITNO,a.regno) as reg_no,\n" +
                        "if(c.should_capi is null,'',c.should_capi) as should_capi,\n" +
                        "if(c.should_capi_value is null,'',c.should_capi_value) as should_capi_value,\n" +
                        "if(c.should_capi_unit is null,'',c.should_capi_unit) as should_capi_unit,\n" +
                        "if(c.investment_ratio is null,'',c.investment_ratio) as shareholding_ratio,\n" +
                        "a.jobid as dates,\n" +
                        "if(c.company_name is null,'1','0') as isadd \n" +
                        "from a_dc_ep_ods.zs_t_an_investment a \n" +
                        "left join a_dc_ep_ods.zs_t_an_investment_del d\n" +
                        "on a.ancheid = d.ancheid\n" +
                        "left join\n" +
                        "annual_basicinfo as b on \n" +
                        "a.ancheid=b.id \n" +
                        "left join \n" +
                        "(select f.*,e.`year` from a_dc_ep_dwi.t_annual_partner as f \n" +
                        "left join a_dc_ep_dwi.t_annual_report e on f.annual_report_id=e.id) as c on\n" +
                        // Company-name comparison strips both full- and half-width
                        // parentheses before matching.
                        "regexp_replace(trim(c.name),'([(（）)])','') = regexp_replace(trim(b.company_name),'([(（）)])','')\n" +
                        "and\n" +
                        "regexp_replace(trim(c.company_name),'([(（）)])','') = regexp_replace(trim(a.entname),'([(（）)])','')\n" +
                        "and d.ancheyear=replace(c.`year`,'年度报告','')");
                // Append rows deleted upstream this cycle, flagged with isadd = '-1'.
                spark.sql("insert into table a_dc_ep_incr.t_annual_investinfo select " +
                        "id," +
                        "annual_report_id," +
                        "key_no," +
                        "company_id," +
                        "company_name," +
                        "`no`," +
                        "name," +
                        "reg_no," +
                        "should_capi," +
                        "should_capi_value," +
                        "should_capi_unit," +
                        "shareholding_ratio," +
                        "dates," +
                        "'-1' isadd " +
                        "from a_dc_ep_dwi.t_annual_investinfo where annual_report_id in (select distinct ancheid from a_dc_ep_ods.zs_t_an_investment_del)");
            } else {
                // No source data this cycle: clear the incremental target table.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_annual_investinfo");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Always release the Spark session, even if a query above failed.
            spark.stop();
        }
    }
}
