package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TEciEmployeeZs
 * @Author: xyl
 * @Description: 针对企查查t_eci_employee表数据映射开发
 * @Date: 2021/12/13 17:51
 */

/**
 * Spark batch job that maps ZhongShu (zs) manager-info data onto the QCC
 * t_eci_employee layout and writes the result into
 * {@code a_dc_ep_incr.zs_t_eci_employee}.
 *
 * <p>Flow: if the current zs_t_managerinfo batch is non-empty, first emit the
 * rows to delete (isadd = '-1') derived from zs_t_managerinfo_del, then emit
 * the rows to add (isadd = '1') derived from zs_t_managerinfo; otherwise
 * truncate the target table.
 */
public class TEciEmployeeZs {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_eci_employee_zs")
                .enableHiveSupport()
                .getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");
        try {
            // Size of the current zs batch decides between full processing and truncate.
            long zsCount = spark.sql("select * from a_dc_ep_ods.zs_t_managerinfo").count();
            if (zsCount > 0) {
                // Enrich the dwi t_eci_employee rows with credit_code from the company table.
                spark.sql("select a.*,b.credit_code from a_dc_ep_dwi.zs_t_eci_employee a left join a_dc_ep_ods.t_eci_company b on a.company_id = b.company_id ").createOrReplaceTempView("qcc_t_eci_employee");
                // Deletion candidates that carry a non-empty creditcode.
                spark.sql("select * from a_dc_ep_ods.zs_t_managerinfo_del where creditcode is not null and creditcode != ''").createOrReplaceTempView("zs_t_managerinfo_del_1");
                // Deletion candidates WITHOUT a creditcode: resolve their company name via entid.
                // BUG FIX: this must read the base table a_dc_ep_ods.zs_t_managerinfo_del —
                // the previous code read zs_t_managerinfo_del_1, whose rows all have a
                // non-empty creditcode, so the "creditcode is null or ''" filter could never
                // match and this view was always empty (name-based deletes never happened).
                spark.sql("select distinct b.entname from a_dc_ep_ods.zs_t_managerinfo_del a left join a_dc_ep_ods.zs_t_managerinfo b on a.entid = b.entid where (a.creditcode is null or a.creditcode = '') and b.entname is not null").createOrReplaceTempView("zs_t_managerinfo_del_2");
                // Rows of qcc_t_eci_employee to delete: matched either by credit code or by company name.
                spark.sql("select * from qcc_t_eci_employee where credit_code in (select creditcode from zs_t_managerinfo_del_1)").union(spark.sql("select * from qcc_t_eci_employee where company_name in (select entname from zs_t_managerinfo_del_2)")).createOrReplaceTempView("qcc_t_eci_employee_del");
                // Write the delete records first (isadd = '-1'), replacing the target partition content.
                spark.sql("insert overwrite table a_dc_ep_incr.zs_t_eci_employee select id,key_no,company_id,company_name,name,job,p_key_no,dates,'-1' as isadd from qcc_t_eci_employee_del");
                // Build the add records (isadd = '1'): join zs_t_managerinfo to t_eci_company,
                // by credit code when available, otherwise by normalized company name
                // (parentheses stripped); fall back to md5(creditcode|entname) as a synthetic
                // company key when no company_id (>20 chars) is found.
                spark.sql("select a.record_id as id,if(length(b.company_id)>20,b.company_id,md5(if(length(a.creditcode)=18,a.creditcode,a.entname))) as key_no,if(length(b.company_id)>20,b.company_id,md5(if(length(a.creditcode)=18,a.creditcode,a.entname))) as company_id,a.entname as company_name,a.PERNAME as name,a.POSITION as job,md5(concat(a.entname,a.PERNAME)) as p_key_no,a.jobid as dates,'1' as isadd from (select k.*,j.creditcode from a_dc_ep_ods.zs_t_managerinfo k left join a_dc_ep_ods.zs_t_managerinfo_del j on k.entid=j.entid where j.creditcode is not null and j.creditcode !='') a left join a_dc_ep_ods.t_eci_company b on a.creditcode = b.credit_code")
                        .union(spark.sql("select a.record_id as id,if(length(b.company_id)>20,b.company_id,md5(if(length(a.creditcode)=18,a.creditcode,a.entname))) as key_no,if(length(b.company_id)>20,b.company_id,md5(if(length(a.creditcode)=18,a.creditcode,a.entname))) as company_id,a.entname as company_name,a.PERNAME as name,a.POSITION as job,md5(concat(a.entname,a.PERNAME)) as p_key_no,a.jobid as dates,'1' as isadd from (select k.*,j.creditcode from a_dc_ep_ods.zs_t_managerinfo k left join a_dc_ep_ods.zs_t_managerinfo_del j on k.entid=j.entid where j.creditcode is null or j.creditcode ='') a left join a_dc_ep_ods.t_eci_company b on regexp_replace(trim(a.entname),'([(（）)])','') = regexp_replace(trim(b.company_name),'([(（）)])','') where instr(b.status,'销') =0 "))
                        .createOrReplaceTempView("zs_t_managerinfo_1");
                // Append the add records. The GROUP BY + collect_set merges multiple job titles
                // of the same person at the same company into one row (joined by '，'), so the
                // output row count being lower than the zs source row count is expected.
                spark.sql("insert into table a_dc_ep_incr.zs_t_eci_employee select md5(concat(company_name,name,job)),key_no,company_id,company_name,name,job,p_key_no,if(length(dates)==8,dates,replace(current_date(),'-','')),'1' as isadd from (select key_no,company_id,company_name,name,concat_ws('，',collect_set(job)) as job,p_key_no,dates,isadd from zs_t_managerinfo_1 group by key_no,company_id,company_name,name,p_key_no,dates,isadd)");
            } else {
                // Empty batch: clear the incremental table so stale rows are not re-consumed.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.zs_t_eci_employee");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Always release the Spark session, even if one of the queries fails.
            spark.stop();
        }
    }
}
