package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TInvestInfo
 * @Author: kongcb
 * @Description: Weekly Spark ETL job: loads the non-person shareholder delta for the
 *               given weekly partition, joins it with company / company-attribute
 *               data, and overwrites the matching partition of t_investinfo.
 * @Date: 2021/12/15 9:20
 */
public class TInvestInfo {

    /**
     * Entry point. Loads the non-person shareholder delta for the given weekly
     * partition from {@code a_dc_ep_all_ods.zs_t_eci_partner}, enriches it with
     * company and company-attribute columns, and overwrites the matching
     * (month, weeks) partition of {@code a_dc_ep_all_ods.t_investinfo}. When the
     * weekly delta is empty, the incremental table is truncated instead.
     *
     * @param args args[0] = weekly partition key: digits only, at least 6
     *             characters (the first 6 form the {@code month} partition value)
     * @throws IllegalArgumentException if the partition key is missing or malformed
     */
    public static void main(String[] args) {
        // Fail fast on a missing/short/non-numeric argument instead of failing later
        // with ArrayIndexOutOfBoundsException / StringIndexOutOfBoundsException at
        // weeks.substring(0, 6). Restricting to digits also keeps the value safe to
        // concatenate into the SQL strings below.
        if (args.length < 1 || !args[0].matches("\\d{6,}")) {
            throw new IllegalArgumentException(
                    "Usage: TInvestInfo <weeks> (digits only, at least 6, e.g. 20211215)");
        }
        String weeks = args[0];

        SparkSession spark = SparkSession.builder()
                .appName("t_investinfo")
                .enableHiveSupport()
                .getOrCreate();
        try {
            spark.sparkContext().setLogLevel("ERROR");

            // Shareholder rows added/updated in this weekly partition, excluding
            // natural persons ("自然人"), person keys ('p%') and empty keys.
            Dataset<Row> zsData = spark.sql(
                    "select * from a_dc_ep_all_ods.zs_t_eci_partner where weeks= '" + weeks
                            + "' and isadd!='-1' and stock_type not like '%自然人%'"
                            + " and p_key_no not like 'p%' and p_key_no is not null and p_key_no!=''");

            if (zsData.count() > 0) {
                // Register the already-built delta so the join below reuses it
                // instead of re-executing the identical filter query a second time.
                zsData.createOrReplaceTempView("zs_partner_delta");

                // Company master data enriched with attributes (tax no, org no,
                // logo URL, contact info).
                spark.sql("select b.company_id,b.oper_name,b.regist_capi,b.regist_capi_value,b.regist_capi_unit,b.credit_code,c.tax_no,b.no,c.org_no,b.econ_kind,b.belong_org,b.province_code,b.province,c.image_url,b.phone_number,c.email,c.website " +
                        "from a_dc_ep_ods.t_eci_company b left join a_dc_ep_dwi.t_eci_companyattr c on b.company_id = c.company_id").createOrReplaceTempView("company_info");

                // NOTE(review): a.p_key_no is selected twice and 'a.shoud_date' looks
                // like a typo for 'should_date' — both are kept byte-identical because
                // the 'insert overwrite ... select *' below is positional and must
                // match the target table's column count/order. Confirm against the
                // t_investinfo schema before changing.
                spark.sql("select a.id,a.p_key_no,a.p_key_no,a.stock_name,a.key_no,a.company_id,a.company_name,a.stock_percent,a.should_capi,a.shoud_date,b.oper_name,b.regist_capi,b.regist_capi_value,b.regist_capi_unit,b.credit_code,b.tax_no,b.no,b.org_no,b.econ_kind,b.belong_org,b.province_code,b.province,b.image_url,b.phone_number,b.email,b.website,a.dates,a.isadd " +
                        " from zs_partner_delta a left join company_info b on a.company_id = b.company_id").createOrReplaceTempView("result_data");

                // Overwrite only the (month, weeks) partition for this run.
                spark.sql("insert overwrite table a_dc_ep_all_ods.t_investinfo partition(month='"
                        + weeks.substring(0, 6) + "',weeks='" + weeks + "') select * from result_data");
            } else {
                // NOTE(review): this truncates a_dc_ep_incr.t_investinfo while the
                // insert above targets a_dc_ep_all_ods.t_investinfo — confirm the
                // differing database prefixes are intentional.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_investinfo");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            // Always release the Spark session, even when a query fails.
            spark.stop();
        }
    }

}
