package com.pw.study.realtime.app

import com.alibaba.fastjson.JSON
import com.pw.study.common.constants.TopicConstant
import com.pw.study.realtime.handle.{KafkaHandler, RedisHandler}
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object UserApp extends BaseAPP {
  appName = "userApp"
  groupName = "userApp"

  // Kafka topics this application consumes; only the user-info topic here.
  val topics = Array(TopicConstant.GMALL_USER_INFO)

  /**
   * Entry point: consumes user-info records from Kafka in 10-second micro-batches,
   * caches each record in Redis under the key "userinfo:&lt;id&gt;", and then commits
   * the Kafka offsets for the batch.
   *
   * Offset-commit ordering: offsets are captured on the driver before processing
   * and committed only after every partition has been written to Redis, giving
   * at-least-once semantics (a failed batch is re-consumed).
   */
  def main(args: Array[String]): Unit = {
    conf.setAppName(appName)
    context = new StreamingContext(conf, Seconds(10))
    runApp({
      val ds = KafkaHandler.getKafkaStream(topics, context, groupName)

      ds.foreachRDD(rdd => {
        if (!rdd.isEmpty()) {
          // Must read offset ranges from the raw Kafka RDD on the driver,
          // before any shuffle/transformation loses the HasOffsetRanges view.
          val ranges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

          // NOTE: count() launches an extra Spark job per batch purely for this
          // log line; acceptable for monitoring, but remove if throughput matters.
          println("消费数据条数：" + rdd.count())

          rdd.foreachPartition(partition => {
            // One Redis connection per partition (executors cannot share a
            // driver-side connection; Jedis is not serializable).
            val jedis = RedisHandler.getJedisClient()
            try {
              partition.foreach(record => {
                val map = JSON.parseObject(record.value())
                jedis.set("userinfo:" + map.getString("id"), record.value())
              })
            } finally {
              // Always release the connection, even when a record fails to
              // parse or a Redis write throws — otherwise the pool leaks.
              jedis.close()
            }
          })

          // Commit only after the whole batch is safely in Redis.
          ds.asInstanceOf[CanCommitOffsets].commitAsync(ranges)
        }
      })
    })
  }
}
