package cn.tecnova.test

import cn.tecnova.utils.ConfigHandler
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * description: Spark Streaming consumer that reads records from the
  * "nlp_article_emotion" Kafka topic and prints them. (Previous comment
  * described storing a MySQL lexicon into Kafka, which does not match
  * this code — it is a consumer, not a producer.)
  **/
object KafkaConsumer {

  // Silence verbose Spark/Kafka framework logging; keep only errors.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
    * Entry point: subscribes to a Kafka topic with Spark Streaming
    * (direct stream, 2-second micro-batches) and prints every record
    * on the executors.
    *
    * @param args optional overrides — args(0): topic name,
    *             args(1): consumer group id. When absent, the original
    *             hard-coded defaults are used, so existing invocations
    *             keep working unchanged.
    */
  def main(args: Array[String]): Unit = {

    // Backward-compatible generalization of the previously hard-coded values.
    val topic   = if (args.length > 0) args(0) else "nlp_article_emotion"
    val groupId = if (args.length > 1) args(1) else "g_base11"

    val conf = new SparkConf()
      .setMaster("local[*]")
      // getSimpleName on a Scala object is "KafkaConsumer$"; drop the '$'.
      .setAppName(this.getClass.getSimpleName.stripSuffix("$"))
      // Finish in-flight batches instead of dropping them on JVM shutdown.
      .set("spark.streaming.stopGracefullyOnShutdown", "true")

    // 2-second batch interval.
    val ssc = new StreamingContext(conf, Seconds(2))

    val stream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      // Explicit key/value type parameters avoid fragile inference.
      ConsumerStrategies.Subscribe[String, String](Array(topic), ConfigHandler.kafkaParams(groupId))
    )

    // Print each record; runs on the executors, one partition at a time.
    stream.foreachRDD { rdd =>
      rdd.foreachPartition { iter =>
        iter.foreach(println)
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }

}
