import java.util.{Date, Random, UUID}

import com.atguigu.commons.conf.ConfigurationManager
import com.atguigu.commons.constant.Constants
import com.atguigu.commons.model.{UserInfo, UserVisitAction}
import com.atguigu.commons.utils.{DateUtils, NumberUtils, ParamUtils, StringUtils, ValidUtils}
import net.sf.json.JSONObject
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.collection.mutable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}


object SessionStat {


	def main(args: Array[String]): Unit = {
		// Read the task's filter conditions (a JSON string) from configuration.
		val jsonStr: String = ConfigurationManager.config.getString(Constants.TASK_PARAMS)
		val taskParam: JSONObject = JSONObject.fromObject(jsonStr)
		// Globally unique id for this run; used as the primary key in the result tables.
		val taskUUID: String = UUID.randomUUID().toString
		val sparkConf: SparkConf = new SparkConf().setAppName("session").setMaster("local[*]")
		// SparkSession (wraps the SparkContext); Hive support is required for the source tables.
		val sparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

		// Raw user-visit actions restricted to the task's date range.
		// Fixed: was declared `var` but never reassigned — now a `val`.
		val actionRDD: RDD[UserVisitAction] = getOrActionRDD(sparkSession, taskParam)
		//actionRDD.foreach(println(_))

		// RDD[(session_id, action)]
		val sessionId2ActionRDD: RDD[(String, UserVisitAction)] = actionRDD.map(item => (item.session_id, item))
		sessionId2ActionRDD.foreach(println)

		// Group every action belonging to the same session: session_id -> 1..n actions.
		val session2GroupActionRDD: RDD[(String, Iterable[UserVisitAction])] = sessionId2ActionRDD.groupByKey
		//session2GroupActionRDD.foreach(println)
		// Cached because the grouped RDD is reused by the aggregation step below.
		session2GroupActionRDD.cache

		// Aggregate each session's actions into one "fullInfo" string: RDD[(sessionId, fullInfo)].
		val sessionId2FullInfoRDD: RDD[(String, String)] = getSessionFullInfo(sparkSession, session2GroupActionRDD)
		//sessionId2FullInfoRDD.foreach(println)

		// Accumulator collecting per-bucket session counts (visit-length / step-length buckets).
		val sessionAccumlator: SessionAccumlator = new SessionAccumlator

		// An accumulator must be registered before use.
		sparkSession.sparkContext.register(sessionAccumlator)

		// Filter sessions by the task parameters; the accumulator is updated as a side effect.
		val sessionId2FilterRDD: RDD[(String, String)] = getSessionFilteredRDD(taskParam, sessionId2FullInfoRDD, sessionAccumlator)
		// NOTE: an action (e.g. foreach/count) must run on the filtered RDD before the
		// accumulator's value is meaningful — transformations alone are lazy.
		//sessionId2FilterRDD.foreach(println)

		// Requirement 1: write the ratio statistics to the database.
		//getSessionRatio(sparkSession, taskUUID, sessionAccumlator.value)

		// Requirement 2: random session extraction, proportional per hour.
		//sessionRadomExtract(sparkSession, taskUUID, sessionId2FilterRDD)

		// Requirement 3: keep only the actions of sessions that passed the filter.
		val sessionId2FilterActionRDD: RDD[(String, UserVisitAction)] = sessionId2ActionRDD.join(sessionId2FilterRDD).map {
			case (sessionId, (action, fullInfo)) =>
				(sessionId, action)
		}

		// Requirement 3: top-10 popular categories.
		val top10CategoryArray: Array[(SortKey, String)] = top10PopularCategories(sparkSession, taskUUID, sessionId2FilterActionRDD)

		// Requirement 4: top-10 most active sessions within the top-10 categories.
		top10Category2Session(sparkSession, taskUUID, sessionId2FilterActionRDD, top10CategoryArray)

	}


	/**
	 * Requirement 4: for each of the top-10 categories, find the 10 sessions with
	 * the most clicks on that category and append them to MySQL via JDBC.
	 */
	def top10Category2Session(sparkSession: SparkSession,
	                          taskUUID: String,
	                          sessionId2FilterActionRDD: RDD[(String, UserVisitAction)],
	                          top10CategoryArray: Array[(SortKey, String)]) = {
		// Category ids of the top-10 categories, extracted from their aggregated info strings.
		val top10CategoryIds: Array[Long] = top10CategoryArray.map {
			case (_, fullInfo) =>
				StringUtils.getFieldFromConcatString(fullInfo, "\\|", Constants.FIELD_CATEGORY_ID).toLong
		}

		// (sessionId, clickedCategoryId), keeping only clicks on the top-10 categories.
		// (The original misleadingly named this `cid2SessionId`.)
		val sessionId2TopCidRDD: RDD[(String, Long)] = sessionId2FilterActionRDD
				.map { case (sessionId, action) => (sessionId, action.click_category_id) }
				.filter { case (_, cid) => top10CategoryIds.contains(cid) }

		// For each session, count clicks per category and emit (cid, "sessionId=count").
		val cid2SessionCountRDD: RDD[(Long, String)] = sessionId2TopCidRDD.groupByKey.flatMap {
			case (sessionId, cids) =>
				val clicksPerCid = new mutable.HashMap[Long, Long]()
				cids.foreach { cid =>
					clicksPerCid(cid) = clicksPerCid.getOrElse(cid, 0L) + 1
				}
				clicksPerCid.map { case (cid, count) => (cid, sessionId + "=" + count) }
		}

		// Per category: keep the 10 sessions with the highest click counts.
		val top10SessionRDD = cid2SessionCountRDD.groupByKey().flatMap {
			case (cid, sessionCounts) =>
				val topEntries: List[String] = sessionCounts.toList.sortWith {
					(left, right) =>
						left.split("=")(1).toLong > right.split("=")(1).toLong
				}.take(10)

				topEntries.map { entry =>
					val fields = entry.split("=")
					Top10Session(taskUUID, cid, fields(0), fields(1).toLong)
				}
		}

		import sparkSession.implicits._
		top10SessionRDD.toDF().write
				.format("jdbc")
				.option("url", ConfigurationManager.config.getString(Constants.JDBC_URL))
				.option("user", ConfigurationManager.config.getString(Constants.JDBC_USER))
				.option("password", ConfigurationManager.config.getString(Constants.JDBC_PASSWORD))
				.option("dbtable", "top10_session_0308")
				.mode(SaveMode.Append)
				.save()
	}


	// 获取 每个商品的 clik 的点击总数
	// Total click count per category id (actions without a click, i.e. id == -1, are dropped).
	def getClikCount(sessionId2FilterActionRDD: RDD[(String, UserVisitAction)]) = {
		sessionId2FilterActionRDD
				.filter { case (_, action) => action.click_category_id != -1L }
				.map { case (_, action) => (action.click_category_id, 1L) }
				.reduceByKey(_ + _)
	}


	// 获取 每个商品的 Order 的点击总数
	// Total order count per category id; order_category_ids is a comma-separated id list.
	def getOrderCount(sessionId2FilterActionRDD: RDD[(String, UserVisitAction)]) = {
		sessionId2FilterActionRDD
				.filter { case (_, action) => action.order_category_ids != null }
				.flatMap { case (_, action) =>
					action.order_category_ids.split(",").map(cid => (cid.toLong, 1L))
				}
				.reduceByKey(_ + _)
	}


	// 获取 每个商品的 Pay 的点击总数
	// Total pay count per category id; pay_category_ids is a comma-separated id list.
	def getPayCount(sessionId2FilterActionRDD: RDD[(String, UserVisitAction)]) = {
		sessionId2FilterActionRDD
				.filter { case (_, action) => action.pay_category_ids != null }
				.flatMap { case (_, action) =>
					action.pay_category_ids.split(",").map(cid => (cid.toLong, 1L))
				}
				.reduceByKey(_ + _)
	}


	/**
	 * Joins the base category ids with their click/order/pay counts, producing
	 * RDD[(cid, "categoryid=..|clickCount=..|orderCount=..|payCount=..")].
	 * Left outer joins: a category missing from a count RDD contributes 0.
	 */
	def getFullCount(cid2CidRDD: RDD[(Long, Long)],
	                 cid2ClickCountRDD: RDD[(Long, Long)],
	                 cid2OrderCountRDD: RDD[(Long, Long)],
	                 cid2PayCountRDD: RDD[(Long, Long)]) = {
		// getOrElse(0L) replaces the isDefined/get pair — same value, no partial access.
		val cid2ClickInfoRDD = cid2CidRDD.leftOuterJoin(cid2ClickCountRDD).map {
			case (cid, (_, clickCountOpt)) =>
				val aggrCount = Constants.FIELD_CATEGORY_ID + "=" + cid + "|" +
						Constants.FIELD_CLICK_COUNT + "=" + clickCountOpt.getOrElse(0L)
				(cid, aggrCount)
		}

		val cid2OrderInfoRDD: RDD[(Long, String)] = cid2ClickInfoRDD.leftOuterJoin(cid2OrderCountRDD).map {
			case (cid, (clickInfo, orderCountOpt)) =>
				val aggrInfo = clickInfo + "|" +
						Constants.FIELD_ORDER_COUNT + "=" + orderCountOpt.getOrElse(0L)
				(cid, aggrInfo)
		}

		cid2OrderInfoRDD.leftOuterJoin(cid2PayCountRDD).map {
			case (cid, (orderInfo, payCountOpt)) =>
				val aggrInfo = orderInfo + "|" +
						Constants.FIELD_PAY_COUNT + "=" + payCountOpt.getOrElse(0L)
				(cid, aggrInfo)
		}
	}


	// 三: 统计top10 商品
	def top10PopularCategories(sparkSession: SparkSession,
	                           taskUUID: String,
	                           sessionId2FilterActionRDD: RDD[(String, UserVisitAction)]) = {
		// 1. 获取所有的点击数和订单数
		var cid2CidRDD: RDD[(Long, Long)] = sessionId2FilterActionRDD.flatMap {
			case (sessionId, action) =>
				// 之所以 使用 long long 是为了方便以后的join  -- 应为join 需要 key
				val categoryBuffer: ArrayBuffer[(Long, Long)] = new ArrayBuffer[(Long, Long)]()

				if (action.click_category_id != -1L) {
					categoryBuffer.append((action.click_category_id, action.click_category_id))
				} else if (action.order_category_ids != null) {
					for (orderCid <- action.order_category_ids.split(",")) {
						categoryBuffer.append((orderCid.toLong, orderCid.toLong))
					}
				} else if (action.pay_category_ids != null) {
					for (orderCid <- action.pay_category_ids.split(",")) {
						categoryBuffer.append((orderCid.toLong, orderCid.toLong))
					}
				}
				categoryBuffer
		}

		//此时 已经获得所有的商品的编号
		cid2CidRDD = cid2CidRDD.distinct()

		// 第二步.统计品类的点击次数
		val cid2ClickCountRDD: RDD[(Long, Long)] = getClikCount(sessionId2FilterActionRDD)
		val cid2OrderCountRDD: RDD[(Long, Long)] = getOrderCount(sessionId2FilterActionRDD)
		val cid2PayCountRDD: RDD[(Long, Long)] = getPayCount(sessionId2FilterActionRDD)

		//整合数据
		val cid2FullCountRDD: RDD[(Long, String)] = getFullCount(cid2CidRDD, cid2ClickCountRDD, cid2OrderCountRDD, cid2PayCountRDD)

		//实现二次排序
		val sortKey2FullCountRDD: RDD[(SortKey, String)] = cid2FullCountRDD.map {
			case (cid, countInfo) => {
				val clikCount = StringUtils.getFieldFromConcatString(countInfo, "\\|", Constants.FIELD_CLICK_COUNT).toLong
				val orderCount = StringUtils.getFieldFromConcatString(countInfo, "\\|", Constants.FIELD_ORDER_COUNT).toLong
				val payCount = StringUtils.getFieldFromConcatString(countInfo, "\\|", Constants.FIELD_PAY_COUNT).toLong

				val sortKey: SortKey = SortKey(clikCount, orderCount, payCount)

				(sortKey, countInfo)
			}
		}

		val top10CategoryArray: Array[(SortKey, String)] = sortKey2FullCountRDD
				.sortByKey(false).take(10)

		val top10CategoryRDD: RDD[Top10Category] = sparkSession.sparkContext.makeRDD(top10CategoryArray).map {
			case (sortKey, countInfo) => {
				val cid = StringUtils.getFieldFromConcatString(countInfo, "\\|", Constants.FIELD_CATEGORY_ID).toLong
				val clickCount = sortKey.clikCount
				val orderCount = sortKey.orderCount
				val payCounnt = sortKey.payCount

				Top10Category(taskUUID, cid, clickCount, orderCount, payCounnt)
			}
		}

		import sparkSession.implicits._

		//top10CategoryRDD.toDF().write
		//		.format("jdbc")
		//		.option("url", ConfigurationManager.config.getString(Constants.JDBC_URL))
		//		.option("user", ConfigurationManager.config.getString(Constants.JDBC_USER))
		//		.option("password", ConfigurationManager.config.getString(Constants.JDBC_PASSWORD))
		//		.option("dbtable", "top10_category_0308")
		//		.mode(SaveMode.Append)
		//		.save()

		top10CategoryArray

	}


	/**
	 * For each hour of a day, computes how many sessions to extract (proportional to
	 * that hour's share of the day's sessions) and fills hourListMap(hour) with that
	 * many DISTINCT random indices in [0, count).
	 *
	 * @param extractPerDay   number of sessions to extract for the whole day
	 * @param daySessionCount total number of sessions on that day
	 * @param hourCountMap    hour -> number of sessions in that hour
	 * @param hourListMap     out-parameter: hour -> chosen random indices
	 */
	def generateRandomIndexList(extractPerDay: Long,
	                            daySessionCount: Long,
	                            hourCountMap: mutable.HashMap[String, Long],
	                            hourListMap: mutable.HashMap[String, ListBuffer[Int]]) = {
		// One RNG for the whole call (was re-created on every loop iteration).
		val random = new Random()

		for ((hour, count) <- hourCountMap) {
			// Sessions to pull from this hour, proportional to its share of the day.
			var hourExrCount = ((count / daySessionCount.toDouble) * extractPerDay).toInt
			// Never extract more sessions than the hour actually has.
			if (hourExrCount > count) {
				hourExrCount = count.toInt
			}

			// The original Some/None match ran identical bodies in both branches;
			// getOrElseUpdate collapses them (creates the list on first access).
			val indexList = hourListMap.getOrElseUpdate(hour, new ListBuffer[Int])
			for (_ <- 0 until hourExrCount) {
				var index = random.nextInt(count.toInt)
				// Re-draw until unused, so the chosen indices are distinct.
				while (indexList.contains(index)) {
					index = random.nextInt(count.toInt)
				}
				indexList.append(index)
			}
		}
	}


	// 随机取样分析
	def sessionRadomExtract(sparkSession: SparkSession,
	                        taskUUID: String,
	                        sessionId2FilterRDD: RDD[(String, String)]) = {

		val dateHour2FullInfoRDD: RDD[(String, String)] = sessionId2FilterRDD.map {
			case (sessionId, fullInfo) =>
				val startTime: String = StringUtils.getFieldFromConcatString(fullInfo, "\\|", Constants.FIELD_START_TIME)
				val dateTime: String = DateUtils.getDateHour(startTime)
				(dateTime, fullInfo)
		}

		// hourCountMap:Map[(dateHour,count)]
		val hourCountMap: collection.Map[String, Long] = dateHour2FullInfoRDD.countByKey()
		// [date, Map[(Hour,count)]]
		val dateHourCountMap = new mutable.HashMap[String, mutable.HashMap[String, Long]]()

		// date  +  Map(hour -> count)
		for ((dateHour, count) <- hourCountMap) {
			var date = dateHour.split("_")(0)
			var hour = dateHour.split("_")(1)

			dateHourCountMap.get(date) match {
				case Some(map) => dateHourCountMap(date) += (hour -> count)
				case None => dateHourCountMap(date) = new mutable.HashMap[String, Long]()
					dateHourCountMap(date) += (hour -> count)
			}
		}

		// 1. 一共多少天: dateHourCountMap.size
		//  总共要抽取100条信息,, 平均到每天抽取的信息数为 =  100 / dateHourCountMap.size
		val extractPerDay: Int = 100 / dateHourCountMap.size

		// 已知一天的信息数extractPerDay 按每小时的占比 分配信息数
		// 每小时的占比 为 小时的信息数/ 一天的总信息数
		val dateHourExtractIndexListMap = new mutable.HashMap[String, mutable.HashMap[String, ListBuffer[Int]]]()
		for ((date, hourCountMap) <- dateHourCountMap) {
			// 一天的信息的总量
			val dateSessionCount: Long = hourCountMap.values.sum
			dateHourExtractIndexListMap.get(date) match {
				case None => dateHourExtractIndexListMap(date) = new mutable.HashMap[String, ListBuffer[Int]]()
					generateRandomIndexList(extractPerDay, dateSessionCount, hourCountMap, dateHourExtractIndexListMap(date))
				case Some(map) =>
					generateRandomIndexList(extractPerDay, dateSessionCount, hourCountMap, dateHourExtractIndexListMap(date))
			}

		}


		//广播变量..提升性能
		val dateHourExtractIndexListMapBd = sparkSession.sparkContext.broadcast(dateHourExtractIndexListMap)

		val dateHour2GroupRDD: RDD[(String, Iterable[String])] = dateHour2FullInfoRDD.groupByKey

		val extractSessionRDD: RDD[SessionRandomExtract] = dateHour2GroupRDD.flatMap {
			case (dateHour, interableFullInfo) =>
				var date = dateHour.split("_")(0)
				var hour = dateHour.split("_")(1)

				val extractList: ListBuffer[Int] = dateHourExtractIndexListMapBd.value.get(date).get(hour)

				val extractSessionArrayBuffer = new ArrayBuffer[SessionRandomExtract]()

				var index = 0

				for (fullInfo <- interableFullInfo) {
					if (extractList.contains(index)) {
						val sessionId = StringUtils.getFieldFromConcatString(fullInfo, "\\|", Constants.FIELD_SESSION_ID)
						val startTime = StringUtils.getFieldFromConcatString(fullInfo, "\\|", Constants.FIELD_START_TIME)
						val searchKeyWords = StringUtils.getFieldFromConcatString(fullInfo, "\\|", Constants.FIELD_SEARCH_KEYWORDS)
						val clickCategories = StringUtils.getFieldFromConcatString(fullInfo, "\\|", Constants.FIELD_CLICK_CATEGORY_IDS)

						val extracSession = SessionRandomExtract(taskUUID, sessionId, startTime, searchKeyWords, clickCategories)

						extractSessionArrayBuffer += extracSession
					}
					index += 1
				}
				extractSessionArrayBuffer
		}

		import sparkSession.implicits._
		extractSessionRDD.toDF().write
				.format("jdbc")
				.option("url", ConfigurationManager.config.getString(Constants.JDBC_URL))
				.option("user", ConfigurationManager.config.getString(Constants.JDBC_USER))
				.option("password", ConfigurationManager.config.getString(Constants.JDBC_PASSWORD))
				.option("dbtable", "session_extract_0308")
				.mode(SaveMode.Append)
				.save()
	}


	// 将数据写入mysql
	def getSessionRatio(sparkSession: SparkSession,
	                    taskUUID: String,
	                    value: mutable.HashMap[String, Int]): Unit = {

		val session_count = value.getOrElse(Constants.SESSION_COUNT, 1).toDouble

		val visitLength_1s_3s = value.getOrElse(Constants.TIME_PERIOD_1s_3s, 0)
		val visitLength_4s_6s = value.getOrElse(Constants.TIME_PERIOD_4s_6s, 0)
		val visitLength_7s_9s = value.getOrElse(Constants.TIME_PERIOD_7s_9s, 0)
		val visitLength_10s_30s = value.getOrElse(Constants.TIME_PERIOD_10s_30s, 0)
		val visitLength_30s_60s = value.getOrElse(Constants.TIME_PERIOD_30s_60s, 0)
		val visitLength_1m_3m = value.getOrElse(Constants.TIME_PERIOD_1m_3m, 0)
		val visitLength_3m_10m = value.getOrElse(Constants.TIME_PERIOD_3m_10m, 0)
		val visitLength_10m_30m = value.getOrElse(Constants.TIME_PERIOD_10m_30m, 0)
		val visitLength_30m = value.getOrElse(Constants.TIME_PERIOD_30m, 0)

		val stepLength_1_3 = value.getOrElse(Constants.STEP_PERIOD_1_3, 0)
		val stepLength_4_6 = value.getOrElse(Constants.STEP_PERIOD_4_6, 0)
		val stepLength_7_9 = value.getOrElse(Constants.STEP_PERIOD_7_9, 0)
		val stepLength_10_30 = value.getOrElse(Constants.STEP_PERIOD_10_30, 0)
		val stepLength_30_60 = value.getOrElse(Constants.STEP_PERIOD_30_60, 0)
		val stepLength_60 = value.getOrElse(Constants.STEP_PERIOD_60, 0)


		val visit_length_1s_3s_ratio = NumberUtils.formatDouble(visitLength_1s_3s / session_count, 2)
		val visit_length_4s_6s_ratio = NumberUtils.formatDouble(visitLength_4s_6s / session_count, 2)
		val visit_length_7s_9s_ratio = NumberUtils.formatDouble(visitLength_7s_9s / session_count, 2)
		val visit_length_10s_30s_ratio = NumberUtils.formatDouble(visitLength_10s_30s / session_count, 2)
		val visit_length_30s_60s_ratio = NumberUtils.formatDouble(visitLength_30s_60s / session_count, 2)
		val visit_length_1m_3m_ratio = NumberUtils.formatDouble(visitLength_1m_3m / session_count, 2)
		val visit_length_3m_10m_ratio = NumberUtils.formatDouble(visitLength_3m_10m / session_count, 2)
		val visit_length_10m_30m_ratio = NumberUtils.formatDouble(visitLength_10m_30m / session_count, 2)
		val visit_length_30m_ratio = NumberUtils.formatDouble(visitLength_30m / session_count, 2)

		val step_length_1_3_ratio = NumberUtils.formatDouble(stepLength_1_3 / session_count, 2)
		val step_length_4_6_ratio = NumberUtils.formatDouble(stepLength_4_6 / session_count, 2)
		val step_length_7_9_ratio = NumberUtils.formatDouble(stepLength_7_9 / session_count, 2)
		val step_length_10_30_ratio = NumberUtils.formatDouble(stepLength_10_30 / session_count, 2)
		val step_length_30_60_ratio = NumberUtils.formatDouble(stepLength_30_60 / session_count, 2)
		val step_length_60_ratio = NumberUtils.formatDouble(stepLength_60 / session_count, 2)

		val sessionAggrStat = SessionAggrStat(taskUUID,
			session_count.toInt, visit_length_1s_3s_ratio, visit_length_4s_6s_ratio, visit_length_7s_9s_ratio,
			visit_length_10s_30s_ratio, visit_length_30s_60s_ratio, visit_length_1m_3m_ratio,
			visit_length_3m_10m_ratio, visit_length_10m_30m_ratio, visit_length_30m_ratio,
			step_length_1_3_ratio, step_length_4_6_ratio, step_length_7_9_ratio,
			step_length_10_30_ratio, step_length_30_60_ratio, step_length_60_ratio)

		import sparkSession.implicits._

		val sessionRatioRDD: RDD[SessionAggrStat] = sparkSession.sparkContext.makeRDD(Array(sessionAggrStat))

		sessionRatioRDD.toDF().write
				.format("jdbc")
				.option("url", ConfigurationManager.config.getString(Constants.JDBC_URL))
				.option("user", ConfigurationManager.config.getString(Constants.JDBC_USER))
				.option("password", ConfigurationManager.config.getString(Constants.JDBC_PASSWORD))
				.option("dbtable", "session_stat_ratio_0416")
				.mode(SaveMode.Append)
				.save()

	}


	// Buckets a session's step length (number of actions) and increments the matching
	// accumulator counter. A step length of 0 falls into no bucket, as before.
	def calculateStepLength(stepLength: Long, sessionAccumlator: SessionAccumlator) = {
		stepLength match {
			case n if n >= 1 && n <= 3   => sessionAccumlator.add(Constants.STEP_PERIOD_1_3)
			case n if n >= 4 && n <= 6   => sessionAccumlator.add(Constants.STEP_PERIOD_4_6)
			case n if n >= 7 && n <= 9   => sessionAccumlator.add(Constants.STEP_PERIOD_7_9)
			case n if n >= 10 && n <= 30 => sessionAccumlator.add(Constants.STEP_PERIOD_10_30)
			case n if n > 30 && n <= 60  => sessionAccumlator.add(Constants.STEP_PERIOD_30_60)
			case n if n > 60             => sessionAccumlator.add(Constants.STEP_PERIOD_60)
			case _                       => () // stepLength <= 0: not counted
		}
	}


	/**
	 * Buckets a session's visit length (seconds) and increments the matching
	 * accumulator counter. A visit length of 0 falls into no bucket.
	 */
	def calculateVisitLength(visitLength: Long, sessionAccumlator: SessionAccumlator) = {
		if (visitLength >= 1 && visitLength <= 3)
			sessionAccumlator.add(Constants.TIME_PERIOD_1s_3s)
		else if (visitLength >= 4 && visitLength <= 6)
			sessionAccumlator.add(Constants.TIME_PERIOD_4s_6s)
		else if (visitLength >= 7 && visitLength <= 9)
			sessionAccumlator.add(Constants.TIME_PERIOD_7s_9s)
		else if (visitLength >= 10 && visitLength <= 30)
			// BUG FIX: 10s-30s sessions were incorrectly counted in the 10m-30m bucket
			// (which is already handled by the >600s branch below), so the 10s_30s
			// bucket was never incremented and 10m_30m was over-counted.
			sessionAccumlator.add(Constants.TIME_PERIOD_10s_30s)
		else if (visitLength > 30 && visitLength <= 60)
			sessionAccumlator.add(Constants.TIME_PERIOD_30s_60s)
		else if (visitLength > 60 && visitLength <= 180)
			sessionAccumlator.add(Constants.TIME_PERIOD_1m_3m)
		else if (visitLength > 180 && visitLength <= 600)
			sessionAccumlator.add(Constants.TIME_PERIOD_3m_10m)
		else if (visitLength > 600 && visitLength <= 1800)
			sessionAccumlator.add(Constants.TIME_PERIOD_10m_30m)
		else if (visitLength > 1800)
			sessionAccumlator.add(Constants.TIME_PERIOD_30m)
	}


	// 过滤数据的方法
	def getSessionFilteredRDD(taskParam: JSONObject,
	                          sessionId2FullInfoRDD: RDD[(String, String)],
	                          sessionAccumlator: SessionAccumlator) = {

		val startAge = ParamUtils.getParam(taskParam, Constants.PARAM_START_AGE)
		val endAge = ParamUtils.getParam(taskParam, Constants.PARAM_END_AGE)
		val professionals = ParamUtils.getParam(taskParam, Constants.PARAM_PROFESSIONALS)
		val cities = ParamUtils.getParam(taskParam, Constants.PARAM_CITIES)
		val sex = ParamUtils.getParam(taskParam, Constants.PARAM_SEX)
		val keywords = ParamUtils.getParam(taskParam, Constants.PARAM_KEYWORDS)
		val categoryIds = ParamUtils.getParam(taskParam, Constants.PARAM_CATEGORY_IDS)
		//		val targetPageFlow = ParamUtils.getParam(taskParam, Constants.PARAM_TARGET_PAGE_FLOW)

		var filterInfo = (if (startAge != null) Constants.PARAM_START_AGE + "=" + startAge + "|" else "") +
				(if (endAge != null) Constants.PARAM_END_AGE + "=" + endAge + "|" else "") +
				(if (professionals != null) Constants.PARAM_PROFESSIONALS + "=" + professionals + "|" else "") +
				(if (cities != null) Constants.PARAM_CITIES + "=" + cities + "|" else "") +
				(if (sex != null) Constants.PARAM_SEX + "=" + sex + "|" else "") +
				(if (keywords != null) Constants.PARAM_KEYWORDS + "=" + keywords + "|" else "") +
				(if (categoryIds != null) Constants.PARAM_CATEGORY_IDS + "=" + categoryIds + "|" else "")

		if (filterInfo.endsWith("\\|")) {
			filterInfo = filterInfo.substring(0, filterInfo.length - 1)
		}

		sessionId2FullInfoRDD.filter {
			case (session, fullInfo) => {
				var success = true
				if (!ValidUtils.between(fullInfo, Constants.FIELD_AGE, filterInfo, Constants.PARAM_START_AGE, Constants.PARAM_END_AGE)) {
					success = false
				} else if (!ValidUtils.in(fullInfo, Constants.FIELD_PROFESSIONAL, filterInfo, Constants.PARAM_PROFESSIONALS)) {
					success = false
				} else if (!ValidUtils.in(fullInfo, Constants.FIELD_CITY, filterInfo, Constants.PARAM_CITIES)) {
					success = false
				} else if (!ValidUtils.in(fullInfo, Constants.FIELD_SEX, filterInfo, Constants.PARAM_SEX)) {
					success = false
				} else if (!ValidUtils.in(fullInfo, Constants.FIELD_SEARCH_KEYWORDS, filterInfo, Constants.PARAM_KEYWORDS)) {
					success = false
				} else if (!ValidUtils.in(fullInfo, Constants.FIELD_CATEGORY_ID, filterInfo, Constants.PARAM_CATEGORY_IDS)) {
					success = false
				}

				if (success) {
					sessionAccumlator.add(Constants.SESSION_COUNT)
					val visitLength = StringUtils.getFieldFromConcatString(fullInfo, "\\|", Constants.FIELD_VISIT_LENGTH).toLong
					val stepLength = StringUtils.getFieldFromConcatString(fullInfo, "\\|", Constants.FIELD_STEP_LENGTH).toLong

					calculateVisitLength(visitLength, sessionAccumlator)
					calculateStepLength(stepLength, sessionAccumlator)
				}
				success
			}
		}
	}


	// 根据每一条信息进行聚合
	def getSessionFullInfo(sparkSession: SparkSession,
	                       session2GroupActionRDD: RDD[(String, Iterable[UserVisitAction])]) = {
		val userId2AggrInfoRDD: RDD[(Long, String)] = session2GroupActionRDD.map {
			case (sessionId, iterableAction) => {
				var userId = -1L

				var startTime: Date = null
				var endTime: Date = null

				var stepLength = 0

				val searchKeywords = new StringBuffer("")
				val clickCategories = new StringBuffer("")

				for (action <- iterableAction) {
					// 为 userId 赋初值
					if (userId == -1L) userId = action.user_id

					// 更新 startTime  endTime  actionTime
					val actionTime = DateUtils.parseTime(action.action_time)

					if (startTime == null || startTime.after(actionTime)) startTime = actionTime

					if (endTime == null || endTime.before(actionTime)) endTime = actionTime

					val searchKeyword: String = action.search_keyword

					if (StringUtils.isNotEmpty(searchKeyword) && !searchKeywords.toString.contains(searchKeyword)) {
						searchKeywords.append(searchKeyword + ",")
					}

					val clickCategoryId: Long = action.click_category_id

					if (clickCategoryId != -1L && !clickCategories.toString.contains(clickCategoryId))
						clickCategories.append(clickCategoryId + ",")

					stepLength += 1
				}

				// 处理Stringbuffer的逗号问题
				val searchKw = StringUtils.trimComma(searchKeywords.toString)
				val clickCg = StringUtils.trimComma(clickCategories.toString)

				val visitLength = (endTime.getTime - startTime.getTime) / 1000

				val aggrInfo = Constants.FIELD_SESSION_ID + "=" + sessionId + "|" +
						Constants.FIELD_SEARCH_KEYWORDS + "=" + searchKw + "|" +
						Constants.FIELD_CLICK_CATEGORY_IDS + "=" + clickCg + "|" +
						Constants.FIELD_VISIT_LENGTH + "=" + visitLength + "|" +
						Constants.FIELD_STEP_LENGTH + "=" + stepLength + "|" +
						Constants.FIELD_START_TIME + "=" + DateUtils.formatTime(startTime)

				(userId, aggrInfo)
			}
		}

		val sql = "select * from user_info"

		import sparkSession.implicits._
		val userId2InfoRDD: RDD[(Long, UserInfo)] = sparkSession.sql(sql).as[UserInfo].rdd.map(item => (item.user_id, item))

		val sessionId2FullInfoRDD: RDD[(String, String)] = userId2AggrInfoRDD.join(userId2InfoRDD).map {
			case (userId, (aggrInfo, userInfo)) =>
				val age: Int = userInfo.age
				val professional: String = userInfo.professional
				val sex: String = userInfo.sex
				val city: String = userInfo.city

				val fullInfo = aggrInfo + "|" +
						Constants.FIELD_AGE + "=" + age + "|" +
						Constants.FIELD_PROFESSIONAL + "=" + professional + "|" +
						Constants.FIELD_SEX + "=" + sex + "|" +
						Constants.FIELD_CITY + "=" + city + "|"

				val sessionID: String = StringUtils.getFieldFromConcatString(aggrInfo, "\\|", Constants.FIELD_SESSION_ID)
				(sessionID, fullInfo)
		}
		sessionId2FullInfoRDD
	}


	// 筛选信息
	def getOrActionRDD(sparkSession: SparkSession, taskParam: JSONObject) = {
		// 设置 startDate endDate -- 限制条件
		val startDate: String = ParamUtils.getParam(taskParam, Constants.PARAM_START_DATE)
		val endDate: String = ParamUtils.getParam(taskParam, Constants.PARAM_END_DATE)
		import sparkSession.implicits._

		val sql = "select * from user_visit_action where date>='" + startDate + "' and date<='" + endDate + "'"
		sparkSession.sql(sql).as[UserVisitAction].rdd
	}

}