package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @Class:
 * @Author: guoml
 * @Description: 针对企查查t_ipo_company_info表和聚源表进行数据映射开发
 * @Date: 2021/12/7 12:51
 */

public class TIpoCompanyInfo {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("t_ipo_company_info").enableHiveSupport().getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");
        spark.sql("select * from a_dc_ep_ods.CT_SystemConst m WHERE m.LB in (201,207,1177,1018,1184) and m.DM is not null").createOrReplaceTempView("first_table");
        spark.sql("select a.*,(case when b.ChiName is not null then b.ChiName when d.ChiName is not null then d.ChiName else c.ChiName end) as ChiName,(case when b.GeneralManager is not null then b.GeneralManager when d.GeneralManager is not null then d.GeneralManager else c.GeneralManager end) as GeneralManager,(case when b.LegalRepr is not null then b.LegalRepr when d.LegalPersonRepr is not null then d.LegalPersonRepr else c.LegalPersonRepr end) as LegalRepr,(case when b.SecretaryBD is not null then b.SecretaryBD else c.SecretaryBD end) as SecretaryBD,(case when b.SecuAffairsRepr is not null then b.SecuAffairsRepr else null end) as SecuAffairsRepr,(case when b.ContactTel is not null then b.ContactTel when d.Tel is not null then d.Tel else c.ContactTel end) as ContactTel,(case when b.ContactEmail is not null then b.ContactEmail when d.Email is not null then d.Email else c.Email end) as ContactEmail,(case when b.ContactFax is not null then b.ContactFax when d.Fax is not null then d.Fax else c.ContactFax end) as ContactFax,(case when b.Website is not null then b.Website when d.Website is not null then d.Website else c.Website end) as Website,(case when b.OfficeAddr is not null then b.OfficeAddr when d.OfficeAddr is not null then d.OfficeAddr else c.OfficeAddr end) as OfficeAddr,(case when b.RegAddr is not null then b.RegAddr when d.RegAddr is not null then d.RegAddr else c.RegAddr end) as RegAddr,(case when b.State is not null then b.State else c.Province end) as State,(case when b.ConatactZipCode is not null then b.ConatactZipCode when d.ContactZip is not null then d.ContactZip else c.ContactZip end) as ConatactZipCode,(case when b.LegalConsultant is not null then b.LegalConsultant else null end) as LegalConsultant,(case when b.AccountingFirm is not null then b.AccountingFirm else null end) as AccountingFirm,(case when b.BriefIntroText is not null then b.BriefIntroText when d.BriefIntroText is not null then d.BriefIntroText else c.BriefIntroText end) as BriefIntroText,(case when b.BusinessMajor is not null then b.BusinessMajor when d.BusinessMajor is not null then d.BusinessMajor else null end) as BusinessMajor,(case when b.EstablishmentDate is not null then b.EstablishmentDate when d.EstablishmentDt is not null then d.EstablishmentDt else c.EstablishmentDate end) as EstablishmentDate from (select secucode,innercode,secuabbr,companycode,SecuMarket,ListedSector,SecuCategory from a_dc_ep_ods.Secumain where SecuCategory=1 and ListedState=1)a left join a_dc_ep_ods.LC_StockArchives b on a.CompanyCode = b.CompanyCode left join a_dc_ep_ods.LC_STIBStockArchives c on a.CompanyCode = c.CompanyCode left join a_dc_ep_ods.NQ_ComArchive d on a.CompanyCode = d.CompanyCode").createOrReplaceTempView("tmp_table1");
        spark.sql("select a.*,c.EngName,c.RegCapital,c.CreditCode from tmp_table1 a left join (select * from a_dc_ep_ods.LC_InstiArchive where IfExisted=1) c ON a.CompanyCode = c.ListedCode ").createOrReplaceTempView("tmp_table2 ");
        spark.sql("select a.*,d.SecurityAbbr from tmp_table2 a left join (select innercode,regexp_replace(concat_ws('->',sort_array(collect_list(concat_ws(':',cast(rn as string),SecurityAbbr)))),'\\\\d\\:','') as SecurityAbbr from (select innercode,SecurityAbbr,row_number() over(partition by innercode order by InfoPublDate ASC) as rn from a_dc_ep_ods.LC_SecuChange where IfPassed = 0)t group by innercode)d on a.InnerCode = d.InnerCode").createOrReplaceTempView("tmp_table3");
        spark.sql("select a.*,e.secucode as secucodeB,e.secuabbr as secuabbrB from tmp_table3 a left join (select * from a_dc_ep_ods.Secumain where ListedState = 1 and SecuCategory =2)e on a.CompanyCode = e.CompanyCode ").createOrReplaceTempView("tmp_table4");
        spark.sql("select a.*,f.secucode as secucodeH,f.secuabbr as secuabbrH from tmp_table4 a left join (select * from a_dc_ep_ods.hk_Secumain where ListedState = 1 and SecuCategory =3)f on a.CompanyCode = f.CompanyCode ").createOrReplaceTempView("tmp_table5");
        spark.sql("select a.*,g.ms1 from tmp_table5 a left join (SELECT ms as ms1,dm FROM first_table m WHERE m.LB = 201)g ON a.SecuMarket = g.DM ").createOrReplaceTempView("tmp_table6");
        spark.sql("select a.*,h.ms2 from tmp_table6 a left join (SELECT ms as ms2,dm FROM first_table m WHERE m.LB = 207)h ON a.ListedSector = h.DM ").createOrReplaceTempView("tmp_table7");
        spark.sql("select a.*,i.ms3 from tmp_table7 a left join (SELECT ms as ms3,dm FROM first_table m WHERE m.LB = 1177)i ON a.SecuCategory = i.DM ").createOrReplaceTempView("tmp_table8");
        spark.sql("select a.*,j.FirstIndustryName,j.SecondIndustryName,j.ThirdIndustryName,j.FourthIndustryName from tmp_table8 a left join (select a.companycode,a.FirstIndustryName,a.SecondIndustryName,a.ThirdIndustryName,a.FourthIndustryName from a_dc_ep_ods.LC_ExgIndustry a join (select CompanyCode,max(InfoPublDate) as maxdate from a_dc_ep_ods.LC_ExgIndustry where IfPerformed = 1 and Standard = 38 group by CompanyCode)b on a.CompanyCode = b.CompanyCode and a.InfoPublDate = b.maxdate where a.IfPerformed =1 and a.Standard = 38)j on a.CompanyCode = j.CompanyCode ").createOrReplaceTempView("tmp_table9");
        spark.sql("select a.*,k.LeaderName as director from tmp_table9 a left join (select CompanyCode,LeaderName from a_dc_ep_ods.LC_LeaderPosition where IfPosition = 1 and PositionCode =1)k on a.CompanyCode = k.CompanyCode ").createOrReplaceTempView("tmp_table10");
        spark.sql("select a.*,l.independent_director from tmp_table10 a left join (select CompanyCode,CONCAT_WS(',',collect_list(LeaderName)) as independent_director from a_dc_ep_ods.LC_LeaderPosition where IfPosition = 1 and PositionCode =4 group by CompanyCode)l on a.CompanyCode = l.CompanyCode ").createOrReplaceTempView("tmp_table11");
        spark.sql("select a.*,m.ms4 from tmp_table11 a left join (SELECT AreaChiName as ms4,AreaInnerCode FROM a_dc_ep_ods.LC_AreaCode m)m ON a.State = m.AreaInnerCode ").createOrReplaceTempView("tmp_table12");
        spark.sql("select a.*,n.ID,n.ListDate,n.WeightedPERatio,n.IssueStartDate,n.ParValue,n.IssueVol,n.IssuePrice,n.IssueCost,n.TotalIssueMV,n.IPONetProceeds,n.FirstOpenPrice,n.FirstClosePrice,n.FirstTurnover,n.FirstHighPrice,n.LotRateLP,n.LotRateOnline from tmp_table12 a left join a_dc_ep_ods.LC_AShareIPO n on a.InnerCode = n.InnerCode ").createOrReplaceTempView("tmp_table13");
        spark.sql("select a.*,X.releases_type from tmp_table13 a left join (select ID,CONCAT_WS(',',collect_list(ms5)) as releases_type FROM (select ID,code from a_dc_ep_ods.LC_AShareIPO_SE where typecode=3)o join (SELECT ms as ms5,dm FROM first_table m WHERE m.LB = 1018)p ON o.code = p.DM group by id)X on a.ID = X.ID ").createOrReplaceTempView("tmp_table14");
//        spark.sql("select a.*,p.ms5 from tmp_table14 a left join (SELECT ms as ms5,dm FROM first_table m WHERE m.LB = 1018)p ON a.code = p.DM ").createOrReplaceTempView("tmp_table15");
        spark.sql("select a.*,q.ChangeType from tmp_table14 a left join (select a.innercode,a.ChangeType from a_dc_ep_ods.LC_ListStatus a join (select innercode,max(ChangeDate) as maxdate from a_dc_ep_ods.LC_ListStatus group by innercode)b on a.innercode = b.innercode and a.ChangeDate = b.maxdate)q on a.InnerCode = q.InnerCode ").createOrReplaceTempView("tmp_table16");
        spark.sql("select a.*,r.ms6 from tmp_table16 a left join (SELECT ms as ms6,dm FROM first_table m WHERE m.LB = 1184)r ON a.ChangeType = r.DM ").createOrReplaceTempView("tmp_table17");
        spark.sql("select a.*,s.key_no,s.company_id from tmp_table17 a left join (select credit_code,key_no,company_id from a_dc_ep_ods.t_eci_company where credit_code IS NOT NULL and credit_code!='') s ON upper(trim(s.credit_code)) = upper(trim(a.CreditCode)) ").createOrReplaceTempView("tmp_table18");
        spark.sql("select a.*,t.FirstIndustryName as FirstIndustryName1,t.SecondIndustryName as SecondIndustryName1,t.ThirdIndustryName as ThirdIndustryName1,t.FourthIndustryName as FourthIndustryName1 from tmp_table18 a left join (select a.companycode,a.FirstIndustryName,a.SecondIndustryName,a.ThirdIndustryName,a.FourthIndustryName from a_dc_ep_ods.LC_STIBExgIndustry a join (select CompanyCode,max(InfoPublDate) as maxdate from a_dc_ep_ods.LC_STIBExgIndustry where IfExecuted = 1 and Standard = 38 group by CompanyCode)b on a.CompanyCode = b.CompanyCode and a.InfoPublDate = b.maxdate where a.IfExecuted =1 and a.Standard = 38)t on a.CompanyCode = t.CompanyCode ").createOrReplaceTempView("tmp_table19");
        spark.sql("select a.*,(case when u.EmployeeSum is not null then u.EmployeeSum when w.StaffNumber is not null then w.StaffNumber else v.EmployeeSum end) as EmployeeSum from tmp_table19 a left join (select a.* from a_dc_ep_ods.LC_Staff a join (select CompanyCode,max(enddate) as enddate from a_dc_ep_ods.LC_Staff where MergeMark = 1 and ClassfiedMethod = 9000 group by companycode)b on a.companycode = b.companycode and a.enddate = b.enddate where a.MergeMark = 1 and a.ClassfiedMethod = 9000)u on a.CompanyCode = u.CompanyCode left join (select a.* from a_dc_ep_ods.LC_STIBStaff a join (select CompanyCode,max(enddate) as enddate from a_dc_ep_ods.LC_STIBStaff where IfMerged = 1 and Classification = 9000 group by companycode)b on a.companycode = b.companycode and a.enddate = b.enddate where a.IfMerged = 1 and a.Classification = 9000)v on a.CompanyCode = v.CompanyCode left join (select a.* from a_dc_ep_ods.NQ_Staff a join (select CompanyCode,max(enddate) as enddate,max(infopubldate) as infopubldate from a_dc_ep_ods.NQ_Staff where Standard = 9000 group by companycode)b on a.companycode = b.companycode and a.enddate = b.enddate and a.infopubldate = b.infopubldate where a.Standard = 9000)w on a.CompanyCode = w.CompanyCode").createOrReplaceTempView("tmp_table20");



//        spark.catalog().dropTempView("first_table");
        //获取中数增量数据表中的新增数据（left join）
        Dataset<Row> insert_data = spark.sql("SELECT if(length(key_no)>0,key_no,md5(ChiName)),\n" +
                "       if(length(company_id)>0,company_id,md5(ChiName)),\n" +
                "       ChiName,\n" +
                "       secucode,\n" +
                "       EngName,\n" +
                "       SecurityAbbr,\n" +
                "       secucode,\n" +
                "       secuabbr,\n" +
                "       secucodeB,\n" +
                "       secuabbrB,\n" +
                "       secucodeH,\n" +
                "       secuabbrH,\n" +
                "       concat(ms1,ms2,ms3),\n" +
                "       case when ListedSector = 7 then case when FourthIndustryName1 is not null then FourthIndustryName1 when ThirdIndustryName1 is not null then ThirdIndustryName1 when SecondIndustryName1 is not null then SecondIndustryName1 when FirstIndustryName1 is not null then FirstIndustryName1 end\n" +
                "       else case when FourthIndustryName is not null then FourthIndustryName when ThirdIndustryName is not null then ThirdIndustryName when SecondIndustryName is not null then SecondIndustryName when FirstIndustryName is not null then FirstIndustryName end end as industry,\n" +
                "       GeneralManager,\n" +
                "       LegalRepr,\n" +
                "       SecretaryBD,\n" +
                "       director,\n" +
                "       SecuAffairsRepr,\n" +
                "       independent_director,\n" +
                "       ContactTel,\n" +
                "       ContactEmail,\n" +
                "       ContactFax,\n" +
                "       Website,\n" +
                "       OfficeAddr,\n" +
                "       RegAddr,\n" +
                "       ms4,\n" +
                "       ConatactZipCode,\n" +
                "       RegCapital,\n" +
                "       CreditCode,\n" +
                "       EmployeeSum,\n" +
                "       '' as manager_number,\n" +
                "       LegalConsultant,\n" +
                "       AccountingFirm,\n" +
                "       BriefIntroText,\n" +
                "       BusinessMajor,\n" +
                "       ListDate,\n" +
                "       '' as yesterday_close_price,\n" +
                "       '' as today_open_price,\n" +
                "       ms6,\n" +
                "       EstablishmentDate,\n" +
                "       WeightedPERatio,\n" +
                "       IssueStartDate,\n" +
                "       releases_type,\n" +
                "       ParValue,\n" +
                "       IssueVol,\n" +
                "       IssuePrice,\n" +
                "       IssueCost,\n" +
                "       TotalIssueMV,\n" +
                "       IPONetProceeds,\n" +
                "       FirstOpenPrice,\n" +
                "       FirstClosePrice,\n" +
                "       FirstTurnover,\n" +
                "       FirstHighPrice,\n" +
                "       LotRateLP,\n" +
                "       LotRateOnline,\n" +
                "       '' as created_date,\n" +
                "       '' as updated_date,\n" +
                "       regexp_replace(current_date(),'-',''),\n" +
                "       \"1\" as isadd\n" +
                "FROM\n" +
                "  tmp_table20");
        insert_data.createOrReplaceTempView("tmp_t_ipo_company_info1");
        spark.sql("insert overwrite table a_dc_ep_incr.t_ipo_company_info select * from tmp_t_ipo_company_info1");
        spark.stop();
    }
}
