package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TAnnualBasicinfo
 * @Author: guoml
 * @Description: 针对企查查t_annual_basicinfo表和中数t_an_basic表进行数据映射开发
 *               (maps data from QCC table t_annual_basicinfo to ZS table t_an_basic)
 * @Date: 2021/12/7 12:51
 */

public class TAnnualBasicinfo {

    /** Target incremental table written by this job. */
    private static final String TARGET_TABLE = "a_dc_ep_incr.t_annual_basicinfo";

    /**
     * Spark driver entry point.
     *
     * <p>Reads the ZS incremental table {@code a_dc_ep_ods.zs_t_an_basic}; if it has rows,
     * maps them onto the annual-basicinfo layout (joining capital, website counts and the
     * company master) and overwrites {@link #TARGET_TABLE}. If the ZS table is empty, the
     * target table is truncated instead.
     *
     * <p>The mapping is done twice and unioned:
     * <ol>
     *   <li>rows that carry a usable credit number, matched to the company master on
     *       {@code credit_code};</li>
     *   <li>rows without a credit number but with a registration number, matched on
     *       {@code no}.</li>
     * </ol>
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_annual_basicinfo")
                .enableHiveSupport()
                .getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");
        try {
            long zsCount = spark.sql("select * from a_dc_ep_ods.zs_t_an_basic").count();
            if (zsCount > 0) {
                // Branch 1: basic rows that have a credit number; join the company master
                // on the (trimmed, case-insensitive) unified social credit code.
                Dataset<Row> byCreditNo = spark.sql(buildSelectSql(
                        "CREDITNO IS NOT NULL AND CREDITNO != ''",
                        "upper(trim(h.credit_code)) = upper(trim(b.CREDITNO)) "
                                + "AND h.credit_code IS NOT NULL AND h.credit_code != ''"));
                // Branch 2: basic rows lacking a credit number but having a registration
                // number; join the company master on the registration number instead.
                Dataset<Row> byRegNo = spark.sql(buildSelectSql(
                        "regno IS NOT NULL AND regno != '' AND (CREDITNO IS NULL OR CREDITNO = '')",
                        "upper(trim(h.no)) = upper(trim(b.regno)) "
                                + "AND h.no IS NOT NULL AND h.no != ''"));

                Dataset<Row> insertData = byCreditNo.union(byRegNo);
                insertData.createOrReplaceTempView("tmp_t_annual_basicinfo1");
                spark.sql("insert overwrite table " + TARGET_TABLE
                        + " select * from tmp_t_annual_basicinfo1");
            } else {
                spark.sql("TRUNCATE TABLE " + TARGET_TABLE);
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Ensure the session is released even when a SQL statement fails.
            spark.stop();
        }
    }

    /**
     * Builds the mapping SELECT shared by both union branches. The column list and the
     * first three joins are identical for both branches; only the filter applied to the
     * basic table and the join condition against the company master differ.
     *
     * @param basicFilter   WHERE predicate selecting rows from {@code zs_t_an_basic}
     *                      (the {@code b} subquery)
     * @param companyJoinOn ON condition joining {@code t_eci_company} ({@code h}) to
     *                      the selected basic rows
     * @return the full SELECT statement text
     */
    private static String buildSelectSql(String basicFilter, String companyJoinOn) {
        return "SELECT b.ANCHEID,\n"
                + "       h.key_no,\n"
                + "       h.company_id,\n"
                + "       b.entname,\n"
                + "       b.REGNO,\n"
                + "       h.oper_name,\n"
                + "       b.TEL,\n"
                + "       b.POSTALCODE,\n"
                + "       b.ADDR,\n"
                + "       b.EMAIL,\n"
                + "       '' as is_stock_right_transfer,\n"
                + "       h.status,\n"
                + "       case when i.num is not null and i.num>0 then '是' else '否' end as has_website,\n"
                + "       '' as has_new_stock_or_by_stock,\n"
                + "       b.EMPNUM as employee_count,\n"
                + "       b.DEPENDENTENTNAME as belong_to,\n"
                + "       g.ASSGRO as capital_amount,\n"
                + "       '' as has_provide_assurance,\n"
                + "       '' as operation_places,\n"
                + "       '' as main_type,\n"
                // NOTE(review): concat(...) yields NULL whenever term_end is empty (the if()
                // returns null and concat is null-propagating) — confirm that is intended.
                // Alias added so the union/temp-view column has a stable name.
                + "       concat(\"从\",h.term_start,\"到\",if(length(h.term_end)!=0,h.term_end,null)) as operation_period,\n"
                + "       '' as if_content_same,\n"
                + "       '' as different_content,\n"
                + "       h.scope,\n"
                + "       '' as approved_operation_item,\n"
                + "       b.CREDITNO,\n"
                + "       b.JOBID,\n"
                + "       \"1\" as isadd\n"
                + "FROM\n"
                + "  (SELECT *\n"
                + "   FROM a_dc_ep_ods.zs_t_an_basic\n"
                + "   WHERE " + basicFilter + ") b\n"
                + "LEFT JOIN\n"
                + "  a_dc_ep_ods.zs_t_an_basic_capital g on b.ANCHEID = g.ANCHEID\n"
                + "LEFT JOIN\n"
                + "  (select ancheid,count(1) as num from a_dc_ep_ods.zs_t_an_websiteshop group by ancheid) i on b.ANCHEID = i.ANCHEID\n"
                + "LEFT JOIN\n"
                + "  a_dc_ep_ods.t_eci_company h ON " + companyJoinOn;
    }
}
