import java.util.UUID

import commons.conf.ConfigurationManager
import commons.constant.Constants
import commons.model.UserVisitAction
import commons.utils.{DateUtils, ParamUtils, StringUtils}
import net.sf.json.JSONObject
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.collection.mutable


object PageConvertStat {


  def main(args: Array[String]): Unit = {

    // Read the task's filter conditions (a JSON string) from configuration.
    val jsonStr = ConfigurationManager.config.getString(Constants.TASK_PARAMS)
    val taskParam: JSONObject = JSONObject.fromObject(jsonStr)

    // Unique primary key for this task run.
    val taskUUID: String = UUID.randomUUID().toString

    // Build the SparkConf / SparkSession (local mode, Hive support enabled).
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("pageConvert")
    val sparkSession: SparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()
    // TODO: Requirement 5: page single-jump conversion rate statistics
    // 1. Load user actions in the configured date range, keyed by session id.
    val sessionId2ActionRDD: RDD[(String, UserVisitAction)] = getUserVisitAction(sparkSession, taskParam)

    // 2. Build the target page-flow splits from the task parameters.
    val pageFlowStr: String = taskParam.getString(Constants.PARAM_TARGET_PAGE_FLOW)
    // e.g. "1,2,3,4,5,6,7"
    val pageFlowArray: Array[String] = pageFlowStr.split(",")
    // pageFlowArray.slice(0, pageFlowArray.length - 1) => [1,2,3,4,5,6]
    // pageFlowArray.tail                               => [2,3,4,5,6,7] (all but the first element)
    // zip pairs elements at the same position into tuples, giving the target
    // splits ["1_2", "2_3", ..., "6_7"].
    val targetPageSpilt: Array[String] = pageFlowArray.slice(0, pageFlowArray.length - 1).zip(pageFlowArray.tail).map {
      case (page1, page2) =>
        page1 + "_" + page2
    }

    // 3. Group the action data by session id.
    val session2ActionGroup: RDD[(String, Iterable[UserVisitAction])] = sessionId2ActionRDD.groupByKey()

    val pageSplitNumRDD = session2ActionGroup.flatMap {
      case (sessionId, actionIterable) =>
        // Sort each session's actions chronologically by action_time.
        val sortList: List[UserVisitAction] = actionIterable.toList.sortWith((item1, item2) => {
          DateUtils.parseTime(item1.action_time).getTime < DateUtils.parseTime(item2.action_time).getTime
        })

        // Page ids visited by this session, in time order, e.g. [1,2,3,4,5,6].
        val pageIdList: List[Long] = sortList.map(item => item.page_id)

        // Adjacent-page splits for this session, e.g. ["1_2","2_3","3_4","4_5"].
        val pageSplit: List[String] = pageIdList.slice(0, pageIdList.length - 1).zip(pageIdList.tail).map {
          case (page1, page2) => page1 + "_" + page2
        }
        // Keep only the splits that belong to the configured target flow.
        val pageFilter: List[String] = pageSplit.filter {
          case pageSplit => targetPageSpilt.contains(pageSplit)
        }

        // Emit (split, 1L) pairs for counting.
        val pageSplitCount: List[(String, Long)] = pageFilter.map(item => (item, 1L))
        pageSplitCount
    }
    // NOTE(review): debug output — this runs on the executors, one line per pair;
    // consider removing for production runs.
    pageSplitNumRDD.foreach(println(_))

    // Total occurrences of each matching split across all sessions, e.g. ("1_2", 1).
    val pageSplitCountMap: collection.Map[String, Long] = pageSplitNumRDD.countByKey()

    // First page of the configured flow.
    val startPage: Long = pageFlowArray(0).toLong

    // Count actions (across all sessions) whose page id equals the start page;
    // this is the denominator for the first split's conversion ratio.
    val startPageCount: Long = sessionId2ActionRDD.filter {
      case (sessionId, action) => action.page_id == startPage
    }.count()

    getPageConvert(sparkSession, taskUUID, targetPageSpilt, startPageCount, pageSplitCountMap)

  }


  /**
   *
   * @param sparkSession
   * @param taskUUID
   * @param targetPageSpilt 限制页面切片
   * @param startPageCount
   * @param pageSplitCountMap 聚合后的页面切片的信息（"1_2",500）
   */
  def getPageConvert(sparkSession: SparkSession,
                     taskUUID: String,
                     targetPageSpilt: Array[String],
                     startPageCount: Long,
                     pageSplitCountMap: collection.Map[String, Long]): Unit = {

    val pageSplitRatio = new mutable.HashMap[String, Double]()
    // Denominator for the first split is the visit count of the flow's start page.
    var lastPageCount: Double = startPageCount.toDouble

    // Walk the splits in flow order: ratio(split_i) = count(split_i) / count(previous step).
    for (pageSplit <- targetPageSpilt) {
      // A split that never occurred simply counts as 0. (The original
      // `pageSplitCountMap.get(pageSplit).get` threw NoSuchElementException
      // for any target split absent from the aggregated map.)
      val currentPageSplitCount: Double = pageSplitCountMap.getOrElse(pageSplit, 0L).toDouble

      // Guard against division by zero: a previous step with no visits would
      // otherwise produce NaN/Infinity that ends up persisted in the database.
      val ratio: Double =
        if (lastPageCount == 0.0) 0.0
        else currentPageSplitCount / lastPageCount

      pageSplitRatio.put(pageSplit, ratio)
      lastPageCount = currentPageSplitCount
    }

    // Serialize as "1_2=0.5|2_3=0.25|..." (iteration order of the HashMap is
    // unspecified, matching the original behavior).
    val convertStr: String = pageSplitRatio.map {
      case (pageSplit, ratio) => s"$pageSplit=$ratio"
    }.mkString("|")

    val pageSplit: PageSplitConvertRate = PageSplitConvertRate(taskUUID, convertStr)

    // pageSplit is a single object; wrap it in an Array so it can become an RDD.
    val pageSplitRatioRDD: RDD[PageSplitConvertRate] = sparkSession.sparkContext.makeRDD(Array(pageSplit))

    import sparkSession.implicits._

    // NOTE(review): "conbert" in the table name looks like a typo for "convert";
    // kept as-is because the existing MySQL table may already use this name.
    pageSplitRatioRDD.toDF().write
      .format("jdbc")
      .option("url", ConfigurationManager.config.getString(Constants.JDBC_URL))
      .option("user", ConfigurationManager.config.getString(Constants.JDBC_USER))
      .option("password", ConfigurationManager.config.getString(Constants.JDBC_PASSWORD))
      .option("dbtable", "page_split_conbert_rate_0328")
      .mode(SaveMode.Append)
      .save()

  }



  // 获取指定时间范围内的用户行为数据
  /**
   * Loads user actions within [startDate, endDate] from the Hive table
   * `user_visit_action` and keys them by session id.
   *
   * @param sparkSession active SparkSession (Hive support enabled by the caller)
   * @param taskParam    task-parameter JSON carrying the date range
   * @return RDD of (session_id, UserVisitAction)
   */
  def getUserVisitAction(sparkSession: SparkSession, taskParam: JSONObject): RDD[(String, UserVisitAction)] = {
    // Read both bounds the same way for consistency (the original mixed
    // ParamUtils.getParam with taskParam.getString).
    val startDate: String = ParamUtils.getParam(taskParam, Constants.PARAM_START_DATE)
    val endDate: String = ParamUtils.getParam(taskParam, Constants.PARAM_END_DATE)

    // The original query was missing spaces around "and"
    // (…between '2020-01-01'and'2020-01-02'). Dates come from trusted task
    // configuration, not end-user input, so interpolation is acceptable here.
    val sql: String = s"select * from user_visit_action where date between '$startDate' and '$endDate'"

    import sparkSession.implicits._

    sparkSession.sql(sql).as[UserVisitAction].rdd.map(item => (item.session_id, item))

  }
}

