package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class: TIpoCompanyToptenholder
 * @Author: guoml
 * @Description: Data-mapping job for the QCC (qichacha) t_ipo_company_toptenholder table,
 *               sourced from the JuYuan LC_MainSHListNew and LC_STIBMainSHList tables
 *               (top-ten shareholder lists), plus NQ_Top10SH for the NEEQ market.
 * @Date: 2021/12/7 12:51
 */

/**
 * Spark batch job that rebuilds the incremental top-ten-shareholder table
 * {@code a_dc_ep_incr.t_ipo_company_toptenholder}.
 *
 * <p>It unions three per-market source queries (main/SME boards, STAR market, NEEQ),
 * then keeps only each security's rows with the latest {@code EndDate} and overwrites
 * the target table. Column alignment across the three queries is positional, as
 * required by {@code Dataset.union}.
 */
public class TIpoCompanyToptenholder {

    /**
     * Top-ten holders for main/SME-board listed companies (ListedSector not in 3,7),
     * from JuYuan LC_MainSHListNew, joined to QCC key_no via unified credit code.
     * ID is namespaced with prefix 100 before hashing to avoid collisions across sources.
     */
    private static String mainBoardSql() {
        return "SELECT md5(concat(100,a.ID)) as ID,\n" +
                "       b.SecuCode,\n" +
                "       a.SHList,\n" +
                "       a.ShareCharacterStatement,\n" +
                "       a.HoldSum,\n" +
                "       round(a.PCTOfTotalShares/100,4),\n" +
                "       a.HoldSumChange,\n" +
                "       round(a.HoldSumChangeRate/100,4),\n" +
                "       a.EndDate,\n" +
                "       '' as created_date,\n" +
                "       '' as updated_date,\n" +
                "       d.key_no,\n" +
                "       '' as encrypt_key,\n" +
                "       regexp_replace(current_date(),'-',''),\n" +
                "       \"1\" as isadd\n" +
                "FROM\n" +
                "  (select * from a_dc_ep_ods.LC_MainSHListNew  where InfoTypeCode = 1)a\n" +
                "INNER JOIN\n" +
                "  (select * from a_dc_ep_ods.SecuMain where ListedState = 1 and ListedSector not in (3,7) and SecuCategory = 1) b on a.CompanyCode = b.CompanyCode\n" +
                "LEFT JOIN\n" +
                "  (select * from a_dc_ep_ods.LC_InstiArchive where IfExisted=1) c on a.CompanyCode = c.ListedCode\n" +
                "LEFT JOIN\n" +
                "  (select credit_code,key_no from a_dc_ep_ods.t_eci_company where credit_code IS NOT NULL and credit_code!='') d ON upper(trim(d.credit_code)) = upper(trim(c.CreditCode))";
    }

    /**
     * Top-ten holders for STAR-market companies (ListedSector = 7),
     * from JuYuan LC_STIBMainSHList. ID prefix 300.
     */
    private static String starMarketSql() {
        return "SELECT md5(concat(300,a.ID)) as ID,\n" +
                "       b.SecuCode,\n" +
                "       a.SHName,\n" +
                "       a.ShareCharacterStatement,\n" +
                "       a.HoldSum,\n" +
                "       round(a.PCTOfTotalShares/100,4),\n" +
                "       a.HoldSumChange,\n" +
                "       round(a.HoldSumChangeRate/100,4),\n" +
                "       a.EndDate,\n" +
                "       '' as created_date,\n" +
                "       '' as updated_date,\n" +
                "       d.key_no,\n" +
                "       '' as encrypt_key,\n" +
                "       regexp_replace(current_date(),'-',''),\n" +
                "       \"1\" as isadd\n" +
                "FROM\n" +
                "  (select * from a_dc_ep_ods.LC_STIBMainSHList  where InfoTypeCode = 1)a\n" +
                "INNER JOIN\n" +
                "  (select * from a_dc_ep_ods.SecuMain where ListedState = 1 and ListedSector =7 and SecuCategory = 1) b on a.CompanyCode = b.CompanyCode\n" +
                "LEFT JOIN\n" +
                "  (select * from a_dc_ep_ods.LC_STIBStockArchives) c on a.CompanyCode = c.CompanyCode\n" +
                "LEFT JOIN\n" +
                "  (select credit_code,key_no from a_dc_ep_ods.t_eci_company where credit_code IS NOT NULL and credit_code!='') d ON upper(trim(d.credit_code)) = upper(trim(c.CreditCode))";
    }

    /**
     * Top-ten holders for NEEQ companies (ListedSector = 3), from NQ_Top10SH.
     * ID prefix 200. The source lacks share-character and change-rate columns,
     * so empty strings are emitted in those positions to keep the union aligned.
     */
    private static String neeqSql() {
        return "SELECT md5(concat(200,a.ID)) as ID,\n" +
                "       b.SecuCode,\n" +
                "       a.SHName,\n" +
                "       '' as type,\n" +
                "       a.HoldSumEnd,\n" +
                "       a.HoldingRatioEnd,\n" +
                "       a.HoldChange,\n" +
                "       '' as change_proportion,\n" +
                "       a.EndDate,\n" +
                "       '' as created_date,\n" +
                "       '' as updated_date,\n" +
                "       d.key_no,\n" +
                "       '' as encrypt_key,\n" +
                "       regexp_replace(current_date(),'-',''),\n" +
                "       \"1\" as isadd\n" +
                "FROM\n" +
                "  (select * from a_dc_ep_ods.NQ_Top10SH)a\n" +
                "INNER JOIN\n" +
                "  (select * from a_dc_ep_ods.SecuMain where ListedState = 1 and ListedSector =3 and SecuCategory = 1) b on a.CompanyCode = b.CompanyCode\n" +
                "LEFT JOIN\n" +
                "  (select * from a_dc_ep_ods.NQ_ComArchive) c on a.CompanyCode = c.CompanyCode\n" +
                "LEFT JOIN\n" +
                "  (select credit_code,key_no from a_dc_ep_ods.t_eci_company where credit_code IS NOT NULL and credit_code!='') d ON upper(trim(d.credit_code)) = upper(trim(c.CreditCode))";
    }

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_ipo_company_toptenholder").enableHiveSupport().getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");
        // Collect the newly added rows from the three market-specific sources (positional union).
        Dataset<Row> insertData = spark.sql(mainBoardSql())
                .union(spark.sql(starMarketSql()))
                .union(spark.sql(neeqSql()));
        insertData.createOrReplaceTempView("tmp_t_ipo_company_toptenholder1");
        // Keep only each security's latest reporting period (max EndDate per SecuCode)
        // and overwrite the incremental target table with that snapshot.
        spark.sql("insert overwrite table a_dc_ep_incr.t_ipo_company_toptenholder select a.* from tmp_t_ipo_company_toptenholder1 a inner join (select secucode,max(enddate) as enddate from tmp_t_ipo_company_toptenholder1 group by secucode)b on a.secucode=b.secucode and a.enddate=b.enddate");
        spark.stop();
    }
}
