package com.atguigu.app

import com.alibaba.fastjson.JSON
import com.atguigu.bean.UserInfo
import com.atguigu.common.logger.GmallConstants
import com.atguigu.utils.{MykafkaUtil, RedisUtil}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

/**
 * Daily new-user statistics, ingestion side.
 * Streams user_info insert/update rows (captured from MySQL by canal and
 * published to Kafka) and mirrors the latest JSON per user into a Redis hash.
 *
 * @author WangJX
 * @date 2019/12/3 19:25
 * @version 1.0
 */
object UserApp {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("UserApp")
      // Cap per-partition ingest rate and enable backpressure so a restart
      // after downtime does not flood the 5s micro-batches.
      .set("spark.streaming.kafka.maxRatePerPartition", "100")
      .set("spark.streaming.backpressure.enabled", "true")
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      //.setMaster("local[*]")

    val ssc = new StreamingContext(conf, Seconds(5))

    // Consume user_info change events (inserts/updates from the MySQL binlog via canal).
    // foreachRDD returns Unit, so no val binding is needed.
    MykafkaUtil.getKafkaStream(GmallConstants.KAFKA_TOPIC_NEW_USER, ssc)
      .map(_.value())
      .foreachRDD { rdd =>
        rdd.foreachPartition { records =>
          // One Redis connection per partition; released in finally so a
          // malformed record (JSON.parseObject throwing) cannot leak it.
          val jedis: Jedis = RedisUtil.getJedisPool()
          try {
            for (record <- records) {
              val userInfo: UserInfo = JSON.parseObject(record, classOf[UserInfo])
              // Hash keyed by user id: repeated edits by the same user simply
              // overwrite the field, keeping only the latest state.
              // A periodic sync back to MySQL may be added later.
              jedis.hset(GmallConstants.REDIS_USER_INFO, userInfo.id, record)
              println("更新的用户：" + record)
            }
          } finally {
            jedis.close()
          }
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
}


















