package zjs.dc.controller;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * Spark batch job that refreshes the incremental news table
 * (a_dc_ep_incr.t_newsinfo) from the Zhongshu negative-news source.
 * Only newly added data is handled (只有新增数据).
 *
 * @author kongcb
 * @since 2021/12/15 10:29
 */
public class TNewsInfo {
    /**
     * Entry point. Builds a Hive-enabled SparkSession, and:
     * <ul>
     *   <li>if the Zhongshu source table (a_dc_ep_ods.zs_t_news_fcdb) has rows,
     *       resolves company identifiers via credit code and overwrites the
     *       incremental table a_dc_ep_incr.t_newsinfo;</li>
     *   <li>otherwise truncates the incremental table so downstream consumers
     *       do not re-process the previous batch.</li>
     * </ul>
     * The session is always stopped, even when a query fails.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("t_newsinfo")
                .enableHiveSupport()
                .getOrCreate();
        spark.sparkContext().setLogLevel("ERROR");
        // Guarantee session shutdown on any failure path (was previously leaked
        // if one of the SQL statements threw).
        try {
            Dataset<Row> zsData = spark.sql("select * from a_dc_ep_ods.zs_t_news_fcdb");
            if (zsData.count() > 0) {
                // Join the Zhongshu negative-news table (zs_t_news_fcdb) with its
                // companion delete table (zs_t_news_fcdb_del) on entid to pick up
                // the unified social credit code (creditcode).
                spark.sql("SELECT c.*,\n" +
                        "d.creditcode\n" +
                        "FROM a_dc_ep_ods.zs_t_news_fcdb c\n" +
                        "INNER JOIN a_dc_ep_ods.zs_t_news_fcdb_del d ON c.entid = d.entid")
                        .createOrReplaceTempView("zs_newsinfo");
                // NOTE(review): a long-disabled, commented-out delete-marking step
                // (QCC join + INSERT of isadd='-1' rows) was removed here; restore
                // it from version control if it is ever needed again.
                //
                // Resolve company_id / key_no / company_name by matching credit
                // codes against the deduplicated company master: keep only the
                // latest row per credit_code (newest dates, then isadd, wins) and
                // drop source rows that have no matching company.
                spark.sql("SELECT zs.*,qcc.company_id,qcc.key_no,qcc.company_name \n" +
                        "FROM zs_newsinfo  zs\n" +
                        "LEFT JOIN (select e.* from (select aa.*,ROW_NUMBER() OVER(PARTITION BY aa.credit_code ORDER BY aa.dates DESC,aa.isadd DESC) num from a_dc_ep_ods.t_eci_company aa ) e where e.num = 1 and e.credit_code IS NOT NULL and e.credit_code !='') qcc ON upper(trim(zs.creditcode)) = upper(trim(qcc.credit_code))\n" +
                        "WHERE qcc.credit_code IS NOT NULL")
                        .createOrReplaceTempView("zs_insert_tmp");
                // Overwrite the incremental table; every row is flagged as
                // impact='negative' and isadd='1' (new data only).
                spark.sql("insert overwrite table a_dc_ep_incr.t_newsinfo select zs.RECORD_ID,zs.key_no,zs.company_id,zs.company_name,md5(zs.NEWSTITLE),zs.NEWSTITLE,zs.NEWSSOURCE,zs.PUBLISHTIME,'negative' as impact,EFFECT as tags,zs.NEWSBODY,zs.NEWSURL,'' image_url,zs.PUBLISHTIME,zs.jobid,'1' as isadd from zs_insert_tmp zs");
            } else {
                // Empty source this cycle: clear the incremental table.
                spark.sql("TRUNCATE TABLE a_dc_ep_incr.t_newsinfo");
                System.out.println("中数本期数据涉及该表的数据为空。。。。。。");
            }
        } finally {
            spark.stop();
        }
    }
}
