package cn.tecnova.cd

import java.util.UUID

import cn.tecnova.utils.ConfigHandler
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark.rdd.EsSpark

/**
  * description:
  * Rabcheng
  * Date:2019/6/20 15:26
  **/
object Base2Kafka {

  /**
    * Spark job: reads every document of the Elasticsearch index
    * "base/zhangguoao" as raw JSON and forwards each document to the Kafka
    * topic "CDBase". Each record is keyed by a random UUID (dashes stripped)
    * so records spread roughly uniformly across the topic's partitions.
    *
    * Review fixes:
    *  - the per-partition KafkaProducer is now closed in a finally block.
    *    KafkaProducer.send() only buffers records; close() flushes the buffer,
    *    so previously in-flight records could be silently dropped and producer
    *    sockets/IO threads leaked on the executors for every partition.
    *  - removed the unused SQLContext and dead commented-out code.
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName("base2Kafka")
      // .setMaster("local[*]") // enable only for local debugging
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(classOf[Base]))
      .set("es.nodes", "172.17.22.16,172.17.22.15,172.17.22.17")
      .set("es.port", "9600")
    val sc = new SparkContext(conf)

    // (documentId, documentJson) pairs for every hit in the index.
    val rdd: RDD[(String, String)] = EsSpark.esJsonRDD(sc, "base/zhangguoao")

    // KafkaProducer is not serializable, so it must be created on the
    // executor inside foreachPartition — one producer per partition.
    rdd.foreachPartition { iter =>
      val producer: KafkaProducer[String, String] =
        new KafkaProducer[String, String](ConfigHandler.kafkaProps)
      try {
        iter.foreach { case (_, json) =>
          // Random key -> even distribution over the topic's partitions.
          val key = UUID.randomUUID().toString.replaceAll("-", "")
          producer.send(new ProducerRecord[String, String]("CDBase", key, json))
        }
      } finally {
        // close() flushes all buffered records before releasing resources;
        // without it, records still in the producer buffer are lost.
        producer.close()
      }
    }

    sc.stop()
  }

}
