package com.raylu.app

import com.alibaba.fastjson
import com.alibaba.fastjson.{JSON, JSONArray}
import com.raylu.util.{MyKafkaUtil, OffsetManagerUtil, PropertiesUtil, RedisUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

import java.util
import java.util.Properties
import scala.util.parsing.json.JSONObject

/**
 *
 * Description:
 *
 * Create by lucienoz on 2021/11/29.
 * Copyright © 2021 lucienoz. All rights reserved.
 */
/**
 * Splits the "base DB" Kafka change-log stream into two flows:
 *   - dimension-table rows are cached in Redis under `DIM:<table>:<pk>`;
 *   - fact-table rows are forwarded to per-table `DWD_<TABLE>_<op>` Kafka topics.
 * Kafka offsets are stored externally via [[OffsetManagerUtil]] and committed
 * only after each batch has been fully processed (at-least-once semantics).
 */
object BaseDBApp {

	def main(args : Array[String]) : Unit = {

		val sparkConf : SparkConf = new SparkConf ().setAppName ( "base_db_app" ).setMaster ( "local[*]" )
		val ssc = new StreamingContext ( sparkConf, Seconds ( 5 ) )

		val properties : Properties = PropertiesUtil.load ( "config.properties" )
		val topic = properties.getProperty ( "kafka.db.topic" )
		val groupId = "base_db_group"

		// Resume from externally stored offsets when they exist.
		val offset : Map[TopicPartition, Long] = OffsetManagerUtil.getOffset ( topic, groupId )

		// BUG FIX: the original condition was `offset.isEmpty && offset == null`,
		// which can never be true (a null map NPEs on isEmpty before the null
		// check; a non-null map fails the null check), so the no-stored-offset
		// branch was unreachable. Check null FIRST, then emptiness.
		val kafkaDStream : InputDStream[ConsumerRecord[String, String]] =
			if (offset == null || offset.isEmpty) MyKafkaUtil.getKafkaStreamOfConsumer ( topic, ssc, groupId )
			else MyKafkaUtil.getKafkaStreamOfConsumer ( topic, ssc, groupId, offset )

		// Capture each batch's offset ranges on the driver so they can be
		// committed after the batch succeeds (standard Spark/Kafka pattern;
		// the var is only written in transform, read in foreachRDD).
		var offsetRanges : Array[OffsetRange] = null
		val trackedDStream : DStream[ConsumerRecord[String, String]] = kafkaDStream.transform { rdd =>
			offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
			rdd
		}

		trackedDStream.map { record =>
			// Each record value is one JSON change-log entry (table / type / data ...).
			JSON.parseObject ( record.value () )
		}.foreachRDD { rdd =>
			// Refresh the dim/fact table-name sets once per batch on the driver
			// and broadcast them, so table routing can change without a restart.
			val jedis : Jedis = RedisUtil.getJedisFromPool ()
			jedis.select ( properties.getProperty ( "redis.dimtable.db" ).toInt )
			val dimTableSet : util.Set[String] = jedis.smembers ( "dimtable" )
			val dimTablesBC : Broadcast[util.Set[String]] = ssc.sparkContext.broadcast ( dimTableSet )
			jedis.select ( properties.getProperty ( "redis.facttable.db" ).toInt )
			val factTableSet : util.Set[String] = jedis.smembers ( "facttable" )
			val factTablesBC : Broadcast[util.Set[String]] = ssc.sparkContext.broadcast ( factTableSet )
			jedis.close ()

			rdd.foreachPartition { jsonObjIter =>
				// One Redis connection per partition, closed before the task ends.
				val jedisClient : Jedis = RedisUtil.getJedisFromPool ()
				jedisClient.select ( properties.getProperty ( "redis.dimtable.db" ).toInt )
				for (jsonObj <- jsonObjIter) {
					// Route each change event:
					//   dimension table --> Redis  (DIM:<table>:<pk>)
					//   fact table      --> Kafka  (DWD_<TABLE>_<op> topic)
					val tableName : String = jsonObj.getString ( "table" )
					val pkKey : String = jsonObj.getString ( "id" )

					// Only inserts and updates are propagated; anything else
					// (deletes, DDL, bootstrap events) is dropped.
					val opt : String = jsonObj.getString ( "type" ) match {
						case "UPDATE" => "U"
						case "INSERT" => "I"
						case _        => null
					}
					if (opt != null) {

						val dataJSONObjArr : JSONArray = jsonObj.getJSONArray ( "data" )

						// Dimension rows: cache the latest row image in Redis.
						if (dimTablesBC.value.contains ( tableName )) {
							for (i <- 0 until dataJSONObjArr.size ()) {
								val dataJSONObj : fastjson.JSONObject = dataJSONObjArr.getJSONObject ( i )
								jedisClient.set ( s"DIM:${tableName}:${pkKey}", dataJSONObj.toJSONString )
							}
						}

						// Fact rows: forward each row to its per-table DWD topic.
						if (factTablesBC.value.contains ( tableName )) {
							for (i <- 0 until dataJSONObjArr.size ()) {
								val dataJSONObj : fastjson.JSONObject = dataJSONObjArr.getJSONObject ( i )
								MyKafkaUtil.send ( s"DWD_${tableName.toUpperCase}_${opt}", pkKey, dataJSONObj.toJSONString )
							}
						}
					}
				}

				jedisClient.close ()
				// Flush the producer so no buffered messages can be lost before
				// the offsets below are committed.
				MyKafkaUtil.flush ()
			}

			// Commit offsets only after the entire batch succeeded (at-least-once).
			OffsetManagerUtil.saveOffset ( topic, groupId, offsetRanges )
		}

		ssc.start ()
		ssc.awaitTermination ()
	}

}
