package com.atguigu.gmall.apps
import com.alibaba.fastjson.JSON
import com.atguigu.gmall.Beans.UserInfo
import com.atguigu.gmall.utils.Constant.KafkaConstant
import com.atguigu.gmall.utils.{MykafkaUtil, RedisUtil}
import com.google.gson.Gson
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
import redis.clients.jedis.Jedis

/**
  * @ClassName: UserinfoApp
  * @Description: Spark Streaming job that consumes user-info records from
  *               Kafka and caches each record's raw JSON in Redis, keyed by
  *               "userinfo" + user id, for downstream lookup/enrichment.
  * @Author: kele
  * @Date: 2021/3/28 9:17
  **/
object UserinfoApp {

  /**
    * Entry point: reads the user-info Kafka topic in 5-second micro-batches
    * and writes each record's raw JSON value to Redis under the key
    * "userinfo" + id, where id is parsed from the JSON payload.
    */
  def main(args: Array[String]): Unit = {

    // Local mode with 4 threads; 5-second batch interval.
    val conf = new SparkConf().setMaster("local[4]").setAppName("userinfo")
    val ssc = new StreamingContext(conf, Seconds(5))

    // Direct stream of raw Kafka consumer records from the user-info topic.
    val ui: InputDStream[ConsumerRecord[String, String]] =
      MykafkaUtil.getKafkaStream(KafkaConstant.KAFKA_TOPIC_USER_INFO, ssc)

    ui.foreachRDD(rdd => {

      // One Jedis connection per partition: the client is not serializable,
      // so it must be created on the executor side, not on the driver.
      rdd.foreachPartition(records => {

        val client: Jedis = RedisUtil.getJedisClient

        try {
          records.foreach(record => {

            // Parse only to extract the user id; store the original JSON
            // payload unchanged so nothing is lost on the round-trip.
            val userInfo: UserInfo = JSON.parseObject(record.value(), classOf[UserInfo])

            client.set("userinfo" + userInfo.id, record.value())
          })
        } finally {
          // BUG FIX: previously close() was skipped when parseObject or
          // set threw, leaking the pooled connection on every bad record.
          client.close()
        }
      })
    })

    ssc.start()
    ssc.awaitTermination()
  }
}
