package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * Data-mapping job for the Qichacha {@code t_eci_partner} (company shareholder) table.
 * Reads incremental shareholder records from the ODS layer, enriches them with
 * company identifiers, and writes the result to {@code a_dc_ep_incr.t_eci_partner}.
 *
 * @author snn
 * @since 2021/12/13 17:51
 */

public class TEciPartner {

    /**
     * Job entry point.
     * <p>
     * Flow: if the ODS shareholder source {@code a_dc_ep_ods.zax_t_shareholder}
     * has rows this cycle, join it with the company base / pid-mapping tables to
     * derive keys, unwrap JSON capital fields, and overwrite the incremental
     * target {@code a_dc_ep_incr.t_eci_partner}; otherwise truncate the target.
     * The {@code isadd} flag encodes the change type: -1 delete, 0 update, 1 insert.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_eci_partner").enableHiveSupport().getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");
            Dataset<Row> zs_data = spark.sql("select * from a_dc_ep_ods.zax_t_shareholder");
            long zs_count = zs_data.count();
            if (zs_count > 0) {
                // Incremental company-shareholder data: resolve the person id (pid) from the
                // source row when present, otherwise from the name-based pid mapping table;
                // strip surrounding brackets from capital JSON arrays and normalize
                // full-width parentheses in company names.
                spark.sql("select\n" +
                        "(case when s.investor_id !='' and s.investor_id is not null  then s.investor_id  else x.pid end ) as pid,\n" +
                        "s.eid,\n" +
                        "s.investor_name,\n" +
                        "s.proportion,\n" +
                        "s.capital_amount,\n" +
                        "s.update_time,\n" +
                        "s.data_state,\n" +
                        "translate(translate(s.capital_arr,'[',''),']','') as capital_arr,\n" +
                        "translate(translate(s.capital_actl_arr,'[',''),']','')  as capital_actl_arr,\n" +
                        "s.capital_actl_amount,\n" +
                        "translate(translate(y.ent_name,'（','('),'）',')') as ent_name ,\n" +
                        "y.credit_code \n" +
                        "from  a_dc_ep_ods.zax_t_shareholder  s\n" +
                        "inner join  (select ent_name,credit_code,uid from  a_dc_ep_dwi.zax_t_base where ent_name is not null) y on s.eid = y.uid \n" +
                        "left join (select * from  a_dc_ep_dwi.zax_t_pid where ename is not null and pname is not null ) x on y.ent_name = x.ename and  upper(s.investor_name) = upper(x.pname)\n" +
                        "where s.data_state != 1 ").createOrReplaceTempView("t_eci_partner_temps");

            // Latest snapshot per company: keep only the most recent row for each
            // company_name (ordered by dates, then isadd, descending).
            spark.sql("select a.key_no,a.company_id, translate(translate(a.company_name,'（','('),'）',')') as company_name  from (select aa.*,ROW_NUMBER() OVER(PARTITION BY aa.company_name ORDER BY aa.dates DESC,aa.isadd DESC) num from a_dc_ep_ods.t_eci_company aa) a where a.num=1").createOrReplaceTempView("t_eci_company_temp");

                // Classify each shareholder row as delete / update / insert
                // (isadd: -1 delete, 0 update, 1 insert). Ids fall back to md5 hashes of
                // business keys when no existing id is found; JSON capital fields are
                // unwrapped via get_json_object.
                spark.sql("select \n" +
                        "(case when x.id !='' and x.id is not null  then x.id else md5(concat(s.ent_name,s.investor_name)) end ) as id,\n" +
                        "(case when y.key_no !=''  and y.key_no is not null  then y.key_no else md5(s.credit_code) end ) as key_no,\n" +
                        "(case when y.company_id !='' and y.company_id is not null  then y.company_id else md5(s.credit_code) end ) as company_id,\n" +
                        "s.ent_name as company_name,\n" +
                        "s.investor_name as stock_name,\n" +
                        "(case when x.stock_type !='' and x.stock_type is not null then x.stock_type else '无' end ) as stock_type,\n" +
                        "s.proportion as stock_percent,\n" +
                        "s.capital_amount as should_capi,\n" +
                        "get_json_object(s.capital_arr,'$.amomon') as should_capi_value,\n" +
                        "get_json_object(s.capital_arr,'$.unit') as should_capi_unit,\n" +
                        "get_json_object(s.capital_arr,'$.time') as shoud_date,\n" +
                        "get_json_object(s.capital_arr,'$.paymet') as invest_type,\n" +
                        "get_json_object(s.capital_actl_arr,'$.paymet') as invest_name,\n" +
                        "s.capital_actl_amount as real_capi,\n" +
                        "get_json_object(s.capital_actl_arr,'$.amomon') as real_capi_value,\n" +
                        "get_json_object(s.capital_actl_arr,'$.unit') as real_capi_unit,\n" +
                        "get_json_object(s.capital_actl_arr,'$.time') as capi_date,\n" +
                        "(case when x.p_key_no !='' and x.p_key_no is not null  then x.p_key_no  when s.pid !='' and s.pid is not null  then s.pid else concat('zsj',md5(s.investor_name)) end )   as p_key_no,\n" +
                        "date_format(s.update_time,'yyyyMMdd') as dates,\n" +
                        "(case when s.data_state ='2'  then '-1' when x.key_no !=''  then '0' else '1' end ) as isadd\n" +
                        "from  t_eci_partner_temps s\n" +
                        "inner join t_eci_company_temp  y ON  trim(s.ent_name)= trim(y.company_name)\n" +
                        "left join a_dc_ep_dwi.t_eci_partner x on x.company_id = y.company_id and " +
                        "trim(translate(translate(x.stock_name,'（','('),'）',')')) = trim(translate(translate(s.investor_name,'（','('),'）',')')) ").createOrReplaceTempView("t_eci_partner_end");

                spark.sql("insert overwrite table a_dc_ep_incr.t_eci_partner select * from t_eci_partner_end");
            } else {
                // No shareholder data this cycle: clear the incremental target so
                // downstream consumers see an empty increment rather than stale rows.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_eci_partner");
                System.out.println("智侒信本期没有股东数据");
            }
        } finally {
            // Always release the SparkSession (and its cluster resources), even when
            // a SQL stage throws — previously an exception leaked the session.
            spark.stop();
        }
    }
}
