package cn.tecnova.Synchronous

import cn.tecnova.bean._
import cn.tecnova.utils.{BatopicUtils, ConfigHandler}
import com.alibaba.fastjson.{JSON, JSONObject}
import com.google.gson.Gson
import org.apache.commons.lang.StringUtils
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode}
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Business-analysis stream: consumes JSON records from Kafka and writes each
  * logical topic's payload to its own Elasticsearch index ("&lt;topic&gt;/&lt;topic&gt;").
  * (Original description: 业务分析流kafka数据存储ES Hive.)
  *
  * args(0) — spark.streaming.kafka.maxRatePerPartition
  * args(1) — streaming batch interval, in seconds
  **/
object BaTopic2EsAndHive {

//  System.setProperty("HADOOP_USER_NAME", "root")
  Logger.getLogger("org").setLevel(Level.ERROR)

  /** Topics whose JSON payload deserializes to [[BaAnalysisBean]]; they all
    * share one identical ingest pipeline (previously twelve copy-pasted blocks). */
  private val analysisBeanTopics: Seq[String] = Seq(
    "ba_area_relation_article",
    "ba_company_relation_article",
    "ba_person_relation_article",
    "ba_industry_relation_article",
    "ba_diy_risk_relation_article",
    "ba_event_relation_article",
    "ba_public_sentiment",
    "ba_daily_report",
    "ba_interest",
    "ba_hotspot",
    "ba_topic_mining_article",
    "ba_user_relation_article"
  )

  def main(args: Array[String]): Unit = {

    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 2,
      "usage: BaTopic2EsAndHive <maxRatePerPartition> <batchIntervalSeconds>")

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      //      .setMaster("local[*]")
      .set("spark.streaming.kafka.maxRatePerPartition", args(0))
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      .set("es.index.auto.create", "true")
      .set("es.nodes", ConfigHandler.esNodes)
      .set("es.port", ConfigHandler.esPort)
      .set("es.nodes.wan.only", "true")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(
        classOf[BaAnalysisBean], classOf[BaVolume], classOf[BaUserWarning],
        classOf[BaArticleLabel], classOf[BaArticleNlpAnalysisRes],
        classOf[BaSubjectNlpRes], classOf[BaRiskJudge]))

    val sc = new SparkContext(conf)

    // HiveContext is kept for its implicits (RDD -> DataFrame via toDF()).
    // NOTE(review): the Hive half of this job ("...AndHive") is currently
    // disabled — the original commented-out create-table / insertInto calls
    // per topic were removed here; only the Elasticsearch sink is active.
    val hiveContext = new HiveContext(sc)

    // implicit RDD[Product] -> DataFrame conversions
    import hiveContext.implicits._

    val ssc = new StreamingContext(sc, Seconds(args(1).toInt))

    // Kafka topics of the business-analysis flow (comma-separated in config)
    val baArr: Array[String] = ConfigHandler.batopic.split(",")

    val groupid = "g_batopic2esandhive"

    // Direct stream over all configured Kafka topics
    val allData: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent, // spread partitions evenly across executors
      ConsumerStrategies.Subscribe[String, String](baArr, ConfigHandler.kafkaParams(groupid))
    )

    allData.foreachRDD(rdd => {

      // brings DataFrame.saveToEs into scope
      import org.elasticsearch.spark.sql._

      // Capture this batch's offsets before any transformation, so they can be
      // committed after all sinks have been written.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      if (!rdd.isEmpty()) {

        /** Raw JSON payloads belonging to `topicName`, matched on the embedded
          * "topic_name" field of each record. */
        def topicJson(topicName: String): RDD[String] =
          rdd.filter(record => topicName.equals(JSON.parseObject(record.value()).getString("topic_name")))
            .map(_.value())

        /** Deserializes `topicName`'s payloads with Gson. Uses mapPartitions so
          * one Gson instance serves a whole partition instead of allocating one
          * per record as the original code did. */
        def gsonTopic[T: scala.reflect.ClassTag](topicName: String, clazz: Class[T]): RDD[T] =
          topicJson(topicName).mapPartitions { it =>
            val gson = new Gson()
            it.map(json => gson.fromJson(json, clazz))
          }.coalesce(10)

        /** Writes `df` to the ES index/type named after the topic.
          * The original's per-DataFrame .cache() was dropped: each DataFrame is
          * consumed by exactly one action (saveToEs) and was never unpersisted,
          * so caching only cost executor memory. */
        def saveToTopicIndex(df: DataFrame, topicName: String): Unit =
          df.saveToEs(s"$topicName/$topicName")

        // All BaAnalysisBean-shaped topics go through the same pipeline.
        analysisBeanTopics.foreach { topicName =>
          val beans: RDD[BaAnalysisBean] = BatopicUtils.getBaseBeanRDD(rdd, topicName).coalesce(10)
          if (!beans.isEmpty()) {
            saveToTopicIndex(beans.toDF(), topicName)
          }
        }

        // ba_volume_analysis
        val baVolumeAnalysis: RDD[BaVolume] = gsonTopic("ba_volume_analysis", classOf[BaVolume])
        if (!baVolumeAnalysis.isEmpty()) {
          saveToTopicIndex(baVolumeAnalysis.toDF(), "ba_volume_analysis")
        }

        // ba_user_warning: additionally strips ALL whitespace from warning_content
        // (same \\s+ replacement as the original).
        val baUserWarning: RDD[BaUserWarning] = topicJson("ba_user_warning").mapPartitions { it =>
          val gson = new Gson()
          it.map { json =>
            val bean: BaUserWarning = gson.fromJson(json, classOf[BaUserWarning])
            val content = JSON.parseObject(json).getString("warning_content")
            if (StringUtils.isNotEmpty(content)) {
              bean.warning_content = content.replaceAll("\\s+", "")
            }
            bean
          }
        }.coalesce(10)
        if (!baUserWarning.isEmpty()) {
          saveToTopicIndex(baUserWarning.toDF(), "ba_user_warning")
        }

        // ba_article_label
        val baArticleLabel: RDD[BaArticleLabel] = gsonTopic("ba_article_label", classOf[BaArticleLabel])
        if (!baArticleLabel.isEmpty()) {
          saveToTopicIndex(baArticleLabel.toDF(), "ba_article_label")
        }

        // ba_article_nlp_analysis
        val baArticleNlpAnalysis: RDD[BaArticleNlpAnalysisRes] =
          gsonTopic("ba_article_nlp_analysis", classOf[BaArticleNlpAnalysisRes])
        if (!baArticleNlpAnalysis.isEmpty()) {
          saveToTopicIndex(baArticleNlpAnalysis.toDF(), "ba_article_nlp_analysis")
        }

        // ba_subject_nlp_analysis
        val baSubjectNlpAnalysis: RDD[BaSubjectNlpRes] =
          gsonTopic("ba_subject_nlp_analysis", classOf[BaSubjectNlpRes])
        if (!baSubjectNlpAnalysis.isEmpty()) {
          saveToTopicIndex(baSubjectNlpAnalysis.toDF(), "ba_subject_nlp_analysis")
        }

        // ba_risk_judge
        val baRiskJudge: RDD[BaRiskJudge] = gsonTopic("ba_risk_judge", classOf[BaRiskJudge])
        if (!baRiskJudge.isEmpty()) {
          saveToTopicIndex(baRiskJudge.toDF(), "ba_risk_judge")
        }

      }

      // Commit this batch's offsets only after all sinks have been written,
      // preserving the original at-least-once semantics.
      allData.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
    })

    ssc.start()
    ssc.awaitTermination()

  }

}
