package me.chendan.streaming

import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.json4s._
import org.json4s.native.JsonMethods._
import redis.clients.jedis.{JedisPool, JedisPoolConfig}

/**
  * Created by chendan on 16-5-4.
  */
// Empty companion class kept for compatibility; all job logic lives in the
// companion object of the same name below.
class  UserConsumeCalculator {

}

/**
  * Streaming entry point: consumes JSON payment events from the Kafka topic
  * "UserConsume", sums payments per user within each 5-second micro-batch,
  * and writes each per-user total to Redis db 0 (key = user, value = total).
  *
  * Expected message value format: {"user": "<name>", "payment": <amount>}
  */
object UserConsumeCalculator {

  /** Payload of one Kafka message: a user name and a payment amount. */
  case class User(user: String, payment: Double)

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("UserConsumeCalculator").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(5))
    ssc.checkpoint("/tmp")

    val topics = Set("UserConsume")
    val brokers = "localhost:9092"
    val kafkaParams = Map[String, String](
      "metadata.broker.list" -> brokers,
      "serializer.class" -> "kafka.serializer.StringEncoder")

    val kafkaStream =
      KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)

    // Parse each message value as JSON and reduce to (user, batch total).
    // Malformed records are dropped via Try instead of crashing the job.
    val totals = kafkaStream.flatMap { case (_, json) =>
      implicit val formats: Formats = DefaultFormats
      scala.util.Try {
        val u = parse(json).extract[User]
        (u.user, u.payment)
      }.toOption
    }.reduceByKey(_ + _)

    totals.foreachRDD { rdd =>
      rdd.foreachPartition { records =>
        // One pool per partition (runs executor-side), NOT one per record:
        // the original allocated a JedisPool for every element and never
        // released the pool or the borrowed connection.
        val pool = new JedisPool(new JedisPoolConfig(), "localhost")
        try {
          val jedis = pool.getResource
          try {
            jedis.select(0)
            records.foreach { case (name, payment) =>
              jedis.set(name, payment.toString)
            }
          } finally {
            jedis.close() // returns the connection to the pool
          }
        } finally {
          pool.destroy()
        }
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
