import java.util.UUID

import com.atguigu.commons.conf.ConfigurationManager
import com.atguigu.commons.constant.Constants
import com.atguigu.commons.model.UserVisitAction
import com.atguigu.commons.utils.{DateUtils, ParamUtils}
import net.sf.json.JSONObject
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.collection.mutable


object PageConvertStat {

	def main(args: Array[String]): Unit = {
		// Task parameters are stored in configuration as a JSON string.
		val jsonStr: String = ConfigurationManager.config.getString(Constants.TASK_PARAMS)
		val taskParam: JSONObject = JSONObject.fromObject(jsonStr)

		// Globally unique key identifying this task run in the result table.
		val taskUUID: String = UUID.randomUUID().toString

		val sparkConf: SparkConf = new SparkConf().setAppName("session").setMaster("local[*]")
		// SparkSession (wraps the SparkContext), with Hive support for the source table.
		val sparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

		try {
			// Raw user actions keyed by session id, restricted to the task's date range.
			val session2ActionRDD: RDD[(String, UserVisitAction)] = getOrActionRDD(sparkSession, taskParam)

			// Target page flow, e.g. "1,2,3,4" -> target splits "1_2", "2_3", "3_4".
			val pageFlowStr: String = ParamUtils.getParam(taskParam, Constants.PARAM_TARGET_PAGE_FLOW)
			val pageFlowArray: Array[Long] = pageFlowStr.split(",").map(_.toLong)

			// Adjacent-page pairs of the configured flow; zip with tail truncates
			// automatically, so no explicit slice is needed.
			val targetPageSplit: Array[String] = pageFlowArray.zip(pageFlowArray.tail).map {
				case (page1, page2) => s"${page1}_$page2"
			}

			val sessionId2GroupRDD: RDD[(String, Iterable[UserVisitAction])] = session2ActionRDD.groupByKey

			// Per session: order actions by time, form the adjacent page splits the
			// user actually traversed, and keep only the target splits -> (split, 1L).
			val pageSplitNumRDD: RDD[(String, Long)] = sessionId2GroupRDD.flatMap {
				case (_, iterableAction) =>
					val sortList: List[UserVisitAction] = iterableAction.toList.sortWith((item1, item2) =>
						DateUtils.parseTime(item1.action_time).getTime < DateUtils.parseTime(item2.action_time).getTime
					)

					val pageList: List[Long] = sortList.map(_.page_id)

					pageList.zip(pageList.tail)
						.map { case (page1, page2) => s"${page1}_$page2" }
						.filter(targetPageSplit.contains)
						.map((_, 1L))
			}

			// Total occurrence count of each target split across all sessions.
			val pageSplitCountMap: collection.Map[String, Long] = pageSplitNumRDD.countByKey()

			// Number of actions that hit the first page of the flow: the conversion base.
			val startPage: Long = pageFlowArray(0)
			val startPageCount = session2ActionRDD.filter {
				case (_, action) => action.page_id == startPage
			}.count

			getPageConvert(sparkSession, taskUUID, targetPageSplit, startPageCount, pageSplitCountMap)
		} finally {
			// BUGFIX: the session was never stopped, leaking the local Spark context.
			sparkSession.stop()
		}
	}


	/**
	 * Computes the step-by-step conversion ratio for each target page split
	 * (count of split N divided by count of split N-1, the first split divided
	 * by the start-page visit count) and appends the result to MySQL.
	 *
	 * @param sparkSession      active session used to build the output DataFrame
	 * @param taskUUID          unique id of this task run (primary key in the table)
	 * @param targetPageSplit   splits in flow order, e.g. ["1_2", "2_3", "3_4"]
	 * @param startPageCount    number of actions on the first page of the flow
	 * @param pageSplitCountMap occurrence count per split (splits that never
	 *                          occurred may be absent from the map)
	 */
	def getPageConvert(sparkSession: SparkSession, taskUUID: String,
	                   targetPageSplit: Array[String],
	                   startPageCount: Long,
	                   pageSplitCountMap: collection.Map[String, Long]): Unit = {

		var lastPageCount = startPageCount.toDouble

		// Walk the splits in flow order so the serialized string preserves it.
		// (The previous HashMap-based version emitted the ratios in arbitrary
		// iteration order and crashed with NoSuchElementException on any split
		// absent from the map.)
		val convertStr: String = targetPageSplit.map { pageSplit =>
			// A split missing from the map simply never occurred -> count 0.
			val currentPageSplitCount = pageSplitCountMap.getOrElse(pageSplit, 0L).toDouble
			// Guard the denominator: 0/0 would otherwise write NaN into the DB.
			val ratio = if (lastPageCount == 0.0) 0.0 else currentPageSplitCount / lastPageCount
			lastPageCount = currentPageSplitCount
			s"$pageSplit=$ratio"
		}.mkString("|")

		val pageSplit: PageSplitConvertRate = PageSplitConvertRate(taskUUID, convertStr)
		val pageSplitRatioRDD: RDD[PageSplitConvertRate] = sparkSession.sparkContext.makeRDD(Array(pageSplit))

		import sparkSession.implicits._
		pageSplitRatioRDD.toDF().write
				.format("jdbc")
				.option("url", ConfigurationManager.config.getString(Constants.JDBC_URL))
				.option("user", ConfigurationManager.config.getString(Constants.JDBC_USER))
				.option("password", ConfigurationManager.config.getString(Constants.JDBC_PASSWORD))
				.option("dbtable", "page_split_convert_rate_0308")
				.mode(SaveMode.Append)
				.save()
	}


	/**
	 * Loads user actions within the task's [startDate, endDate] range from Hive
	 * and keys them by session id.
	 *
	 * @return RDD of (session_id, UserVisitAction)
	 */
	def getOrActionRDD(sparkSession: SparkSession, taskParam: JSONObject) = {
		// Date-range limits taken from the task parameters.
		val startDate: String = ParamUtils.getParam(taskParam, Constants.PARAM_START_DATE)
		val endDate: String = ParamUtils.getParam(taskParam, Constants.PARAM_END_DATE)
		import sparkSession.implicits._

		// NOTE(review): dates come from trusted task configuration, not user input,
		// so string interpolation into the SQL is acceptable here.
		val sql = s"select * from user_visit_action where date>='$startDate' and date<='$endDate'"
		sparkSession.sql(sql).as[UserVisitAction].rdd.map(item => (item.session_id, item))
	}
}
