package com.raylu.app

import com.alibaba.fastjson.JSON
import com.raylu.bean.UserInfo
import com.raylu.util
import com.raylu.util.{MyEsUtil, MyKafkaUtil, OffsetManagerUtil, PropertiesUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import java.lang
import java.sql.Timestamp
import java.util.Properties

/**
 *
 * Description: consumes new UserInfo records from Kafka, rewrites the epoch create_time
 * into a readable timestamp, and bulk-saves each day's records into a date-suffixed
 * Elasticsearch index ("dws_user_info_<yyyy-MM-dd>"), committing Kafka offsets manually.
 *
 * Create by lucienoz on 2021/12/11.
 * Copyright © 2021 lucienoz. All rights reserved.
 */
object DwUserInfoApp {

	/**
	 * Streams new UserInfo records from Kafka, converts the epoch-millis
	 * create_time into a timestamp string, and bulk-saves each day's records
	 * into a date-suffixed ES index ("dws_user_info_&lt;yyyy-MM-dd&gt;").
	 * Kafka offsets are tracked manually via OffsetManagerUtil so processing
	 * resumes where it left off after a restart (at-least-once semantics).
	 */
	def main(args : Array[String]) : Unit = {
		val sparkConf : SparkConf = new SparkConf ().setAppName ( "dws_userinfo_app" ).setMaster ( "local[*]" )

		val ssc : StreamingContext = new StreamingContext ( sparkConf, Seconds ( 5 ) )
		val properties : Properties = PropertiesUtil.load ( "config.properties" )
		val topic : String = properties.getProperty ( "kafka.userinfo.topic" )
		val groupId : String = "dw_userinfo_group"

		// Previously committed offsets for this topic/group; may be null or empty
		// on the very first run.
		val offset : Map[TopicPartition, Long] = OffsetManagerUtil.getOffset ( topic, groupId )

		// BUGFIX: the original condition `offset.isEmpty && offset == null` could
		// never be true (a null map would NPE on isEmpty first, a non-null map fails
		// the null test), so the no-offset overload was unreachable. Test null FIRST
		// and combine with `||`. Also: val + if-expression instead of a var.
		val kafkaDStream : InputDStream[ConsumerRecord[String, String]] =
			if (offset == null || offset.isEmpty) MyKafkaUtil.getKafkaStreamOfConsumer ( topic, ssc, groupId )
			else MyKafkaUtil.getKafkaStreamOfConsumer ( topic, ssc, groupId, offset )

		// Capture each micro-batch's offset ranges on the driver so they can be
		// persisted after the batch has been written to ES.
		var offsetRanges : Array[OffsetRange] = null
		val recordDStream : DStream[ConsumerRecord[String, String]] = kafkaDStream.transform { rdd =>
			offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
			rdd
		}

		recordDStream.map ( record => {
			val userInfo : UserInfo = JSON.parseObject ( record.value (), classOf[UserInfo] )
			// create_time arrives as an epoch-millis string; rewrite it as a readable
			// timestamp (its date part keys the ES index name below). `.toLong`
			// replaces the deprecated `new java.lang.Long(String)` constructor.
			val timestamp : Timestamp = new Timestamp ( userInfo.create_time.toLong )
			userInfo.create_time = timestamp.toString
			(userInfo.id, userInfo)
		} ).foreachRDD {
			rdd =>
				rdd
					// Group by creation date (yyyy-MM-dd) so each day lands in its own index.
					.groupBy ( userInfoTuple => userInfoTuple._2.create_time.split ( " " )( 0 ) )
					.foreachPartition ( keyedUserInfos =>
						for ((createDt, userInfoIter) <- keyedUserInfos) {
							val flag : String = MyEsUtil.bulkSave ( "dws_user_info_" + createDt, userInfoIter.toList )
							println ( "ES===>" + flag )
						}
					)
				// Commit offsets only after the batch has been handed to ES.
				OffsetManagerUtil.saveOffset ( topic, groupId, offsetRanges )
		}

		ssc.start ()
		ssc.awaitTermination ()
	}
}
