package com.ymm.spark.spark.session;

import com.alibaba.fastjson.JSONObject;
import com.google.common.base.Optional;
import com.ymm.spark.config.ConfigurationManager;
import com.ymm.spark.constant.Constants;
import com.ymm.spark.dao.ISessionAggrStatDAO;
import com.ymm.spark.dao.ISessionDetailDAO;
import com.ymm.spark.dao.ISessionRandomExtractDAO;
import com.ymm.spark.dao.ITaskDAO;
import com.ymm.spark.dao.ITop10CategoryDAO;
import com.ymm.spark.dao.ITop10SessionDAO;
import com.ymm.spark.dao.factory.DAOFactory;
import com.ymm.spark.domain.SessionAggrStat;
import com.ymm.spark.domain.SessionDetail;
import com.ymm.spark.domain.SessionRandomExtract;
import com.ymm.spark.domain.Task;
import com.ymm.spark.domain.Top10Category;
import com.ymm.spark.domain.Top10Session;
import com.ymm.spark.test.MockData;
import com.ymm.spark.util.DateUtils;
import com.ymm.spark.util.NumberUtils;
import com.ymm.spark.util.ParamUtils;
import com.ymm.spark.util.StringUtils;
import com.ymm.spark.util.ValidUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.spark.Accumulator;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.storage.StorageLevel;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;

/**
 * @author zhangj
 * @date 2019/10/27
 * 用户访问session分析Spark作业
 * 接受用户创建的分析任务,用户可能指定的条件如下:
 * 1:时间范围,起始时间~结束时间
 * 2:性别,男或女
 * 3:年龄范围
 * 4:职业:多选
 * 5:城市:多选
 * 6:搜索词,多个搜索词,只要session中的任何一个action搜索过指定的关键词,那么session就符合条件
 * 7:点击品类,多个品类,只要session中的任何一个action点击过某个品类,那么session就符合条件
 * <p>
 * 我们的spark作业如何接受用户提交的作业?
 * J2EE平台在接受到用户创建任务的请求后,会将任务信息插入到MYSQL的task表中,任务参数以JSON的格式封装task_param字段中
 * 接着J2EE平台会执行我们的spark-submit shell脚本,并将taskid作为参数传递给spark-submit shell脚本
 * spark-submit shell脚本在执行时可以接受参数,并且将接受的参数,传递给spark作业的main函数args参数
 */
@Slf4j
public class UserVisitSessionAnalyzeSpark {
	/**
	 * Entry point of the session-analysis Spark job. Orchestrates the full pipeline:
	 * load action data for the task's date range, aggregate by session, filter by the
	 * task parameters, randomly extract sessions, compute aggregate-stat ratios, and
	 * compute top-10 categories plus their top-10 sessions, persisting results to MySQL.
	 *
	 * @param args CLI arguments from spark-submit; args[0] is expected to carry the task id
	 */
	public static void main(String[] args) {
		// NOTE(review): overwrites the real CLI arguments with a hard-coded task id "1" —
		// presumably a local-testing leftover; remove before running via spark-submit.
		args = new String[]{"1"};

		SparkConf conf = new SparkConf()
				.setAppName(Constants.SPARK_APP_NAME_SESSION)
				.setMaster("local")
				// shrink the cache fraction (default 0.6, i.e. 60% for cache) to leave more memory for task execution
				.set("spark.storage.memoryFraction", "0.5")
				// use the Kryo serialization library
				.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
				// merge map-side shuffle output files
				.set("spark.shuffle.consolidateFiles", "true")
				// map-side shuffle write buffer (default 32KB)
				.set("spark.shuffle.file.buffer", "64")
				// reduce-side aggregation memory fraction (default 0.2)
				.set("spark.shuffle.memoryFraction", "0.3")
				// shuffle manager: hash (default here); sort keeps values ordered, tungsten-sort is the optimized sort
				.set("spark.shuffle.manager", "hash")
				// reduce-side fetch buffer (default 48MB): fetch less per round, across more rounds
				.set("spark.reducer.maxSizeInFlight", "24")
				// max retries when a shuffle fetch fails (default 3)
				.set("spark.shuffle.io.maxRetries", "60")
				// wait between shuffle fetch retries
				.set("spark.shuffle.io.retryWait", "60")
				.registerKryoClasses(new Class[]{CategorySortKey.class});


		JavaSparkContext sc = new JavaSparkContext(conf);
		// checkpoint step 1: set the checkpoint save directory
		//jssc.checkpointFile("hdfs://");
		SQLContext sqlContext = getSQLContext(sc.sc());
		// generate mock data (local mode only)
		mockData(sc, sqlContext);

		// obtain the task DAO
		ITaskDAO taskDAO = DAOFactory.getTaskDAO();

		// the task id is passed into args by the spark-submit shell script launched from the J2EE platform
		// NOTE(review): SPARK_LOCAL_TASKID_PAGE looks like the page-analysis key; confirm a
		// session-specific constant isn't intended here.
		Long taskid = ParamUtils.getTaskIdFromArgs(args, Constants.SPARK_LOCAL_TASKID_PAGE);

		// load the task record
		Task task = taskDAO.findById(taskid);

		// parse the task's query parameters (stored as JSON in the task_param column)
		JSONObject taskParam = JSONObject.parseObject(task.getTaskParam());

		// query the user-visit action data for the requested date range from Hive
		JavaRDD<Row> actionRDD = getActionRDDByDateRange(sqlContext, taskParam);

		// re-key the data as <sessionid, Row>; reused later for session details
		JavaPairRDD<String, Row> sessionid2actionRDD = getSessionid2ActionRDD(actionRDD);

		// persist the RDD; storage-level preference order:
		// 1) MEMORY_ONLY  2) MEMORY_ONLY_SER  3) MEMORY_AND_DISK
		// 4) MEMORY_AND_DISK_SER  5) DISK_ONLY
		// with ample memory, prefer the replicated _2 variants for reliability
		sessionid2actionRDD = sessionid2actionRDD.persist(StorageLevel.MEMORY_ONLY());

		// checkpoint step 2
		//sessionid2actionRDD.checkpoint();

		// group the action data by session_id to get session-granularity records,
		// then join with user info to enrich each session
		JavaPairRDD<String, String> sessionid2AggrInfoRDD = aggregateBySession(sc, sqlContext, sessionid2actionRDD);

		// custom accumulator tracking visit-length and step-length statistics
		Accumulator<String> sessionAggrStatAccumulator = sc.accumulator("", new SessionAggrStatAcumulator());

		// filter sessions by the user-specified criteria, updating the accumulator as a side effect
		JavaPairRDD<String, String> filteredSessionid2AggrInfoRDD = filterSessionAndStat(sessionid2AggrInfoRDD, taskParam, sessionAggrStatAccumulator);

		filteredSessionid2AggrInfoRDD = filteredSessionid2AggrInfoRDD.persist(StorageLevel.MEMORY_ONLY());

		// shared RDD: action detail data for sessions that passed the filter
		JavaPairRDD<String, Row> sessionid2detailRDD = getSessionid2detailRDD(filteredSessionid2AggrInfoRDD, sessionid2actionRDD);

		sessionid2detailRDD = sessionid2detailRDD.persist(StorageLevel.MEMORY_ONLY());

		// randomly extract sessions, proportionally by day and hour
		randomExtractSession(sc, task.getTaskid(), filteredSessionid2AggrInfoRDD, sessionid2detailRDD);

		// compute the ratio of each session range and write to MySQL
		calculateAndPersistAggrStat(sessionAggrStatAccumulator.value(), task.getTaskid());

		// top-10 hot categories, secondary sort by click, order, then pay counts
		List<Tuple2<CategorySortKey, String>> top10CategoryList = getTop10Catagory(sessionid2detailRDD, taskid);

		// top-10 most active sessions for each top-10 category
		getTop10Session(sc, taskid, top10CategoryList, sessionid2detailRDD);

		// shut down the Spark context
		sc.close();
	}

	/**
	 * 获取top10活跃session
	 *
	 * @param jssc
	 * @param taskid
	 * @param top10CategoryList
	 * @param sessionid2detailRDD
	 */
	/**
	 * Computes the top-10 most active sessions (by click count) for each of the top-10
	 * categories, writes them to MySQL, and also writes the matching session detail rows.
	 *
	 * @param jssc                the Java Spark context
	 * @param taskid              id of the running task (FK on every persisted row)
	 * @param top10CategoryList   top-10 categories produced by getTop10Catagory
	 * @param sessionid2detailRDD <sessionid, action Row> pairs for sessions that passed filtering
	 */
	private static void getTop10Session(JavaSparkContext jssc, final Long taskid,
	                                    List<Tuple2<CategorySortKey, String>> top10CategoryList,
	                                    JavaPairRDD<String, Row> sessionid2detailRDD) {
		/**
		 * Step 1: turn the top-10 category ids into an RDD
		 */
		List<Tuple2<Long, Long>> top10CategoryIdList = new ArrayList<Tuple2<Long, Long>>();

		for (Tuple2<CategorySortKey, String> category : top10CategoryList) {
			Long categoryid = Long.valueOf(StringUtils.getFieldFromConcatString(category._2, "\\|", Constants.FIELD_CATEGORY_ID));
			top10CategoryIdList.add(new Tuple2<Long, Long>(categoryid, categoryid));
		}

		JavaPairRDD<Long, Long> top10CategoryIdRDD = jssc.parallelizePairs(top10CategoryIdList);

		/**
		 * Step 2: count, per category, how many clicks each session made
		 */
		JavaPairRDD<String, Iterable<Row>> sessionid2detailsRDD = sessionid2detailRDD.groupByKey();

		JavaPairRDD<Long, String> categoryid2sessionCountRDD = sessionid2detailsRDD.flatMapToPair(
				new PairFlatMapFunction<Tuple2<String, Iterable<Row>>, Long, String>() {
					@Override
					public Iterable<Tuple2<Long, String>> call(Tuple2<String, Iterable<Row>> tuple) throws Exception {
						String sessionid = tuple._1;
						Iterator<Row> iterator = tuple._2.iterator();
						// this session's click count for each clicked category
						Map<Long, Long> categoryCountMap = new HashMap<Long, Long>();
						while (iterator.hasNext()) {
							Row row = iterator.next();
							// column 6 is the click category id; null means the action was not a click
							if (row.get(6) != null) {
								long categoryid = row.getLong(6);
								Long count = categoryCountMap.get(categoryid);
								if (count == null) {
									count = 0L;
								}
								count++;
								categoryCountMap.put(categoryid, count);
							}
						}

						// emit results in <categoryid, "sessionid,count"> format
						List<Tuple2<Long, String>> list = new ArrayList<Tuple2<Long, String>>();
						for (Map.Entry<Long, Long> categoryCountEntry : categoryCountMap.entrySet()) {
							Long categoryid = categoryCountEntry.getKey();
							Long count = categoryCountEntry.getValue();
							String value = sessionid + "," + count;
							list.add(new Tuple2<Long, String>(categoryid, value));
						}
						return list;
					}
				});

		// restrict the per-session click counts to the top-10 categories
		JavaPairRDD<Long, String> top10CategorySessionCountRDD = top10CategoryIdRDD.join(categoryid2sessionCountRDD)
				.mapToPair(new PairFunction<Tuple2<Long, Tuple2<Long, String>>, Long, String>() {
					@Override
					public Tuple2<Long, String> call(Tuple2<Long, Tuple2<Long, String>> tuple) throws Exception {
						return new Tuple2<Long, String>(tuple._1, tuple._2._2);
					}
				});

		/**
		 * Step 3: grouped top-N — take each category's 10 most active sessions
		 */
		JavaPairRDD<Long, Iterable<String>> top10CategorySessionCountsRDD = top10CategorySessionCountRDD.groupByKey();

		JavaPairRDD<String, String> top10SessionRDD = top10CategorySessionCountsRDD.flatMapToPair(
				new PairFlatMapFunction<Tuple2<Long, Iterable<String>>, String, String>() {
					@Override
					public Iterable<Tuple2<String, String>> call(Tuple2<Long, Iterable<String>> tuple) throws Exception {
						Long categoryid = tuple._1;
						Iterator<String> iterator = tuple._2.iterator();
						// fixed-size array kept sorted descending by count (insertion sort)
						String[] top10Sessions = new String[10];
						while (iterator.hasNext()) {
							String sessionCount = iterator.next();
							Long count = Long.valueOf(sessionCount.split(",")[1]);
							// walk the ranking array to find this entry's slot
							for (int i = 0; i < top10Sessions.length; i++) {
								if (top10Sessions[i] == null) {
									top10Sessions[i] = sessionCount;
									break;
								} else {
									long _count = Long.valueOf(top10Sessions[i].split(",")[1]);
									if (count > _count) {
										// shift entries from the last slot down to i one place right
										for (int j = 9; j > i; j--) {
											top10Sessions[j] = top10Sessions[j - 1];
										}
										// place the new entry at slot i
										top10Sessions[i] = sessionCount;
										break;
									}
								}
							}
						}
						// write the ranked sessions to MySQL; fetch the DAO once per category
						// rather than once per inserted row
						ITop10SessionDAO top10SessionDAO = DAOFactory.getITop10SessionDAO();
						List<Tuple2<String, String>> list = new ArrayList<Tuple2<String, String>>();
						for (String sessionCount : top10Sessions) {
							if (sessionCount != null) {
								String sessionid = sessionCount.split(",")[0];
								Long count = Long.valueOf(sessionCount.split(",")[1]);
								Top10Session top10Session = new Top10Session();
								top10Session.setTaskid(taskid);
								top10Session.setCategoryid(categoryid);
								top10Session.setSessionid(sessionid);
								top10Session.setClickCount(count);
								top10SessionDAO.insert(top10Session);
								list.add(new Tuple2<String, String>(sessionid, sessionid));
							}
						}
						return list;
					}
				});

		/**
		 * Step 4: load the detail rows of the top-10 active sessions and write them to MySQL
		 */
		JavaPairRDD<String, Tuple2<String, Row>> sessionDetailRDD = top10SessionRDD.join(sessionid2detailRDD);

		sessionDetailRDD.foreachPartition(new VoidFunction<Iterator<Tuple2<String, Tuple2<String, Row>>>>() {
			@Override
			public void call(Iterator<Tuple2<String, Tuple2<String, Row>>> iterator) throws Exception {
				List<SessionDetail> sessionDetails = new ArrayList<SessionDetail>();
				while (iterator.hasNext()) {
					Tuple2<String, Tuple2<String, Row>> tuple = iterator.next();
					Row row = tuple._2._2;
					SessionDetail sessionDetail = new SessionDetail();
					sessionDetail.setTaskid(taskid);
					sessionDetail.setUserid(row.getLong(1));
					sessionDetail.setSessionid(row.getString(2));
					sessionDetail.setPageid(row.getLong(3));
					sessionDetail.setActionTime(row.getString(4));
					sessionDetail.setSearchKeyword(row.getString(5));
					sessionDetail.setClickCategoryId(row.getLong(6));
					sessionDetail.setClickProductId(row.getLong(7));
					sessionDetail.setOrderCategoryIds(row.getString(8));
					sessionDetail.setOrderProductIds(row.getString(9));
					sessionDetail.setPayCategoryIds(row.getString(10));
					sessionDetail.setPayProductIds(row.getString(11));
					sessionDetails.add(sessionDetail);
				}
				// skip the DB round-trip entirely for empty partitions
				if (!sessionDetails.isEmpty()) {
					ISessionDetailDAO sessionDetailDAO = DAOFactory.getISessionDetailDAO();
					sessionDetailDAO.insertBatch(sessionDetails);
				}
			}
		});
	}

	/**
	 * 获取通过筛选条件的session的访问明细数据RDD
	 *
	 * @param sessionid2actionRDD
	 * @return
	 */
	/**
	 * Joins the filtered session aggregate info with the raw action records, yielding
	 * the per-action detail rows (<sessionid, Row>) restricted to sessions that passed
	 * the filter.
	 *
	 * @param sessionid2aggrInfoRDD filtered sessions keyed by session id
	 * @param sessionid2actionRDD   raw action rows keyed by session id
	 * @return <sessionid, Row> pairs for filtered sessions only
	 */
	private static JavaPairRDD<String, Row> getSessionid2detailRDD(JavaPairRDD<String, String> sessionid2aggrInfoRDD,
	                                                               JavaPairRDD<String, Row> sessionid2actionRDD) {
		JavaPairRDD<String, Tuple2<String, Row>> joinedRDD = sessionid2aggrInfoRDD.join(sessionid2actionRDD);
		return joinedRDD.mapToPair(new PairFunction<Tuple2<String, Tuple2<String, Row>>, String, Row>() {
			@Override
			public Tuple2<String, Row> call(Tuple2<String, Tuple2<String, Row>> joined) throws Exception {
				String sessionid = joined._1;
				Row actionRow = joined._2._2;
				return new Tuple2<String, Row>(sessionid, actionRow);
			}
		});
	}

	/**
	 * 热门商品top10,二次排序 点击次数,下单次数,支付次数,依次排序
	 *
	 * @param sessionid2detailRDD
	 * @param taskid
	 */
	/**
	 * Computes the top-10 hot categories with a secondary sort on click count, order
	 * count, then pay count, writes them to MySQL, and returns them for downstream use.
	 *
	 * @param sessionid2detailRDD <sessionid, action Row> pairs for filtered sessions
	 * @param taskid              id of the running task (FK on persisted rows)
	 * @return the top-10 categories as <CategorySortKey, countInfo> tuples
	 */
	private static List<Tuple2<CategorySortKey, String>> getTop10Catagory(JavaPairRDD<String, Row> sessionid2detailRDD, Long taskid) {
		/**
		 * Step 1: collect all category ids touched by qualifying sessions
		 * (clicked, ordered, or paid)
		 */
		JavaPairRDD<Long, Long> categoryidRDD = sessionid2detailRDD.flatMapToPair(new PairFlatMapFunction<Tuple2<String, Row>, Long, Long>() {
			@Override
			public Iterable<Tuple2<Long, Long>> call(Tuple2<String, Row> tuple) throws Exception {
				Row row = tuple._2;

				List<Tuple2<Long, Long>> list = new ArrayList<Tuple2<Long, Long>>();

				// clicked category id (column 6). BUGFIX: the column is null for non-click
				// actions and Row.getLong(6) would NPE there; the old boxed-Long null check
				// could never fire. Test with get() first, matching the click-count filter.
				if (row.get(6) != null) {
					long clickCategoryId = row.getLong(6);
					list.add(new Tuple2<Long, Long>(clickCategoryId, clickCategoryId));
				}
				// ordered category ids (comma-separated, column 8)
				String orderCategoryIds = row.getString(8);
				if (orderCategoryIds != null) {
					String[] orderCategoryIdsSplited = orderCategoryIds.split(",");
					for (String orderCategoryId : orderCategoryIdsSplited) {
						list.add(new Tuple2<Long, Long>(Long.valueOf(orderCategoryId), Long.valueOf(orderCategoryId)));
					}
				}
				// paid category ids (comma-separated, column 10)
				String payCategoryIds = row.getString(10);
				if (payCategoryIds != null) {
					String[] payCategoryIdsSplited = payCategoryIds.split(",");
					for (String payCategoryId : payCategoryIdsSplited) {
						list.add(new Tuple2<Long, Long>(Long.valueOf(payCategoryId),
								Long.valueOf(payCategoryId)));
					}
				}
				return list;
			}
		});

		// deduplication is required: the same category appears for many actions
		categoryidRDD = categoryidRDD.distinct();

		/**
		 * Step 2: compute click, order, and pay counts per category
		 */
		JavaPairRDD<Long, Long> clickCategoryId2CountRDD = getClickCategoryId2CountRDD(sessionid2detailRDD);
		JavaPairRDD<Long, Long> orderCategoryId2CountRDD = getOrderCategoryId2CountRDD(sessionid2detailRDD);
		JavaPairRDD<Long, Long> payCategoryId2CountRDD = getPayCategoryId2CountRDD(sessionid2detailRDD);

		/**
		 * Step 3: join each category with its click/order/pay counts.
		 * categoryidRDD holds every category touched by qualifying sessions, while the
		 * count RDDs may cover only a subset, so leftOuterJoin is used and missing
		 * counts default to 0.
		 */
		JavaPairRDD<Long, String> categoryid2countRDD = joinCategoryAndData(categoryidRDD, clickCategoryId2CountRDD,
				orderCategoryId2CountRDD, payCategoryId2CountRDD);

		/**
		 * Step 4: map to <CategorySortKey, info> and secondary-sort descending
		 * (CategorySortKey orders by click, then order, then pay count)
		 */
		JavaPairRDD<CategorySortKey, String> sortKey2countRDD = categoryid2countRDD.mapToPair(
				new PairFunction<Tuple2<Long, String>, CategorySortKey, String>() {
					@Override
					public Tuple2<CategorySortKey, String> call(Tuple2<Long, String> tuple) throws Exception {
						String countInfo = tuple._2;
						long clickCount = Long.valueOf(StringUtils.getFieldFromConcatString(countInfo, "\\|",
								Constants.FIELD_CLICK_COUNT));
						long orderCount = Long.valueOf(StringUtils.getFieldFromConcatString(countInfo, "\\|",
								Constants.FIELD_ORDER_COUNT));
						long payCount = Long.valueOf(StringUtils.getFieldFromConcatString(countInfo, "\\|",
								Constants.FIELD_PAY_COUNT));

						CategorySortKey categorySortKey = new CategorySortKey(clickCount, orderCount, payCount);

						return new Tuple2<CategorySortKey, String>(categorySortKey, countInfo);
					}
				});

		JavaPairRDD<CategorySortKey, String> sortedCategoryCountRDD = sortKey2countRDD.sortByKey(false);

		/**
		 * Step 5: take(10) for the top-10 hot categories and write them to MySQL
		 */
		ITop10CategoryDAO top10CategoryDAO = DAOFactory.getITop10CategoryDAO();
		List<Tuple2<CategorySortKey, String>> top10CategoryList = sortedCategoryCountRDD.take(10);
		for (Tuple2<CategorySortKey, String> tuple : top10CategoryList) {
			String countInfo = tuple._2;
			long categoryid = Long.valueOf(StringUtils.getFieldFromConcatString(
					countInfo, "\\|", Constants.FIELD_CATEGORY_ID));
			long clickCount = Long.valueOf(StringUtils.getFieldFromConcatString(countInfo, "\\|",
					Constants.FIELD_CLICK_COUNT));
			long orderCount = Long.valueOf(StringUtils.getFieldFromConcatString(countInfo, "\\|",
					Constants.FIELD_ORDER_COUNT));
			long payCount = Long.valueOf(StringUtils.getFieldFromConcatString(countInfo, "\\|",
					Constants.FIELD_PAY_COUNT));
			Top10Category top10Category = new Top10Category();
			top10Category.setTaskid(taskid);
			top10Category.setCategoryid(categoryid);
			top10Category.setClickCount(clickCount);
			top10Category.setOrderCount(orderCount);
			top10Category.setPayCount(payCount);

			top10CategoryDAO.insert(top10Category);
		}
		return top10CategoryList;
	}

	/**
	 * join各品类与它的点击,下单,支付次数
	 *
	 * @param categoryidRDD
	 * @param clickCategoryId2CountRDD
	 * @param orderCategoryId2CountRDD
	 * @param payCategoryId2CountRDD
	 * @return
	 */
	/**
	 * Left-outer-joins every category id with its click, order, and pay counts, building
	 * a concat string "categoryid=..|clickCount=..|orderCount=..|payCount=..". Categories
	 * without a matching count get 0.
	 *
	 * @param categoryidRDD            all category ids touched by qualifying sessions
	 * @param clickCategoryId2CountRDD per-category click counts
	 * @param orderCategoryId2CountRDD per-category order counts
	 * @param payCategoryId2CountRDD   per-category pay counts
	 * @return <categoryid, countInfo> pairs
	 */
	private static JavaPairRDD<Long, String> joinCategoryAndData(JavaPairRDD<Long, Long> categoryidRDD,
	                                                             JavaPairRDD<Long, Long> clickCategoryId2CountRDD,
	                                                             JavaPairRDD<Long, Long> orderCategoryId2CountRDD,
	                                                             JavaPairRDD<Long, Long> payCategoryId2CountRDD) {

		// seed the concat string with category id and click count
		JavaPairRDD<Long, String> withClickCountRDD = categoryidRDD.leftOuterJoin(clickCategoryId2CountRDD).mapToPair(
				new PairFunction<Tuple2<Long, Tuple2<Long, Optional<Long>>>, Long, String>() {
					@Override
					public Tuple2<Long, String> call(Tuple2<Long, Tuple2<Long, Optional<Long>>> joined) throws Exception {
						Long categoryid = joined._1;
						long clickCount = joined._2._2.or(0L);
						String info = Constants.FIELD_CATEGORY_ID + "=" + categoryid + "|"
								+ Constants.FIELD_CLICK_COUNT + "=" + clickCount;
						return new Tuple2<Long, String>(categoryid, info);
					}
				});

		// append the order count
		JavaPairRDD<Long, String> withOrderCountRDD = withClickCountRDD.leftOuterJoin(orderCategoryId2CountRDD)
				.mapToPair(new PairFunction<Tuple2<Long, Tuple2<String, Optional<Long>>>, Long, String>() {
					@Override
					public Tuple2<Long, String> call(Tuple2<Long, Tuple2<String, Optional<Long>>> joined) throws Exception {
						long orderCount = joined._2._2.or(0L);
						String info = joined._2._1 + "|" + Constants.FIELD_ORDER_COUNT + "=" + orderCount;
						return new Tuple2<Long, String>(joined._1, info);
					}
				});

		// append the pay count and return
		return withOrderCountRDD.leftOuterJoin(payCategoryId2CountRDD)
				.mapToPair(new PairFunction<Tuple2<Long, Tuple2<String, Optional<Long>>>, Long, String>() {
					@Override
					public Tuple2<Long, String> call(Tuple2<Long, Tuple2<String, Optional<Long>>> joined) throws Exception {
						long payCount = joined._2._2.or(0L);
						String info = joined._2._1 + "|" + Constants.FIELD_PAY_COUNT + "=" + payCount;
						return new Tuple2<Long, String>(joined._1, info);
					}
				});
	}

	/**
	 * 计算各个品类的支付次数
	 *
	 * @param sessionid2detailRDD
	 * @return
	 */
	/**
	 * Computes the pay count of each category.
	 *
	 * @param sessionid2detailRDD <sessionid, action Row> pairs for filtered sessions
	 * @return <categoryid, payCount> pairs
	 */
	private static JavaPairRDD<Long, Long> getPayCategoryId2CountRDD(JavaPairRDD<String, Row> sessionid2detailRDD) {
		// keep only pay actions (column 10 = pay_category_ids, null otherwise)
		JavaPairRDD<String, Row> payActions = sessionid2detailRDD.filter(new Function<Tuple2<String, Row>, Boolean>() {
			@Override
			public Boolean call(Tuple2<String, Row> action) throws Exception {
				return action._2.getString(10) != null;
			}
		});

		// explode the comma-separated id list into <categoryid, 1> pairs
		JavaPairRDD<Long, Long> payCategoryOnes = payActions.flatMapToPair(
				new PairFlatMapFunction<Tuple2<String, Row>, Long, Long>() {
					@Override
					public Iterable<Tuple2<Long, Long>> call(Tuple2<String, Row> action) throws Exception {
						String[] idTexts = action._2.getString(10).split(",");
						List<Tuple2<Long, Long>> pairs = new ArrayList<Tuple2<Long, Long>>();
						for (String idText : idTexts) {
							pairs.add(new Tuple2<Long, Long>(Long.valueOf(idText), 1L));
						}
						return pairs;
					}
				});

		// sum the ones per category id
		return payCategoryOnes.reduceByKey(new Function2<Long, Long, Long>() {
			@Override
			public Long call(Long a, Long b) throws Exception {
				return a + b;
			}
		});
	}

	/**
	 * 计算各个品类的下单次数
	 *
	 * @param sessionid2detailRDD
	 * @return
	 */
	/**
	 * Computes the order count of each category.
	 *
	 * @param sessionid2detailRDD <sessionid, action Row> pairs for filtered sessions
	 * @return <categoryid, orderCount> pairs
	 */
	private static JavaPairRDD<Long, Long> getOrderCategoryId2CountRDD(JavaPairRDD<String, Row> sessionid2detailRDD) {
		// keep only order actions (column 8 = order_category_ids, null otherwise)
		JavaPairRDD<String, Row> orderActions = sessionid2detailRDD.filter(new Function<Tuple2<String, Row>, Boolean>() {
			@Override
			public Boolean call(Tuple2<String, Row> action) throws Exception {
				return action._2.getString(8) != null;
			}
		});

		// explode the comma-separated id list into <categoryid, 1> pairs
		JavaPairRDD<Long, Long> orderCategoryOnes = orderActions.flatMapToPair(
				new PairFlatMapFunction<Tuple2<String, Row>, Long, Long>() {
					@Override
					public Iterable<Tuple2<Long, Long>> call(Tuple2<String, Row> action) throws Exception {
						String[] idTexts = action._2.getString(8).split(",");
						List<Tuple2<Long, Long>> pairs = new ArrayList<Tuple2<Long, Long>>();
						for (String idText : idTexts) {
							pairs.add(new Tuple2<Long, Long>(Long.valueOf(idText), 1L));
						}
						return pairs;
					}
				});

		// sum the ones per category id
		return orderCategoryOnes.reduceByKey(new Function2<Long, Long, Long>() {
			@Override
			public Long call(Long a, Long b) throws Exception {
				return a + b;
			}
		});
	}

	/**
	 * 计算各个品类id的点击次数
	 *
	 * @param sessionid2detailRDD
	 * @return
	 */
	/**
	 * Computes the click count of each category.
	 *
	 * Click actions are only a small fraction of the full detail data, so after the
	 * filter each partition is small and likely uneven; coalesce shrinks the partition
	 * count to compact the filtered data.
	 *
	 * @param sessionid2detailRDD <sessionid, action Row> pairs for filtered sessions
	 * @return <categoryid, clickCount> pairs
	 */
	private static JavaPairRDD<Long, Long> getClickCategoryId2CountRDD(JavaPairRDD<String, Row> sessionid2detailRDD) {
		// keep only click actions (column 6 = click_category_id, null otherwise),
		// then compact the shrunken partitions
		JavaPairRDD<String, Row> clickActions = sessionid2detailRDD.filter(
				new Function<Tuple2<String, Row>, Boolean>() {
					@Override
					public Boolean call(Tuple2<String, Row> action) throws Exception {
						return action._2.get(6) != null;
					}
				}).coalesce(100);

		// map each click to <categoryid, 1>
		JavaPairRDD<Long, Long> clickCategoryOnes = clickActions.mapToPair(
				new PairFunction<Tuple2<String, Row>, Long, Long>() {
					@Override
					public Tuple2<Long, Long> call(Tuple2<String, Row> action) throws Exception {
						long clickedCategoryId = action._2.getLong(6);
						return new Tuple2<Long, Long>(clickedCategoryId, 1L);
					}
				});

		// sum the ones per category id.
		// NOTE: if this key distribution is heavily skewed, two mitigations were
		// previously sketched here: (a) pass a higher parallelism to reduceByKey,
		// or (b) two-phase aggregation — salt each key with a random prefix,
		// aggregate locally, strip the prefix, then aggregate globally.
		return clickCategoryOnes.reduceByKey(new Function2<Long, Long, Long>() {
			@Override
			public Long call(Long a, Long b) throws Exception {
				return a + b;
			}
		});
	}

	/**
	 * 获取<sessionid,Row>格式的数据
	 *
	 * @param actionRDD
	 * @return
	 */
	/**
	 * Re-keys the raw action rows as <sessionid, Row> pairs.
	 *
	 * @param actionRDD raw action rows (column 2 holds the session id)
	 * @return <sessionid, Row> pairs
	 */
	private static JavaPairRDD<String, Row> getSessionid2ActionRDD(JavaRDD<Row> actionRDD) {
		// mapPartitionsToPair: one function invocation per partition instead of per record
		return actionRDD.mapPartitionsToPair(new PairFlatMapFunction<Iterator<Row>, String, Row>() {
			@Override
			public Iterable<Tuple2<String, Row>> call(Iterator<Row> rows) throws Exception {
				List<Tuple2<String, Row>> keyedRows = new ArrayList<Tuple2<String, Row>>();
				while (rows.hasNext()) {
					Row row = rows.next();
					keyedRows.add(new Tuple2<String, Row>(row.getString(2), row));
				}
				return keyedRows;
			}
		});
	}

	/**
	 * 随机抽取session
	 * @param jssc
	 * @param taskid
	 * @param filteredSessionid2AggrInfoRDD
	 * @param sessionid2actionRDD
	 */
	private static void randomExtractSession(JavaSparkContext jssc, final long taskid, JavaPairRDD<String, String> filteredSessionid2AggrInfoRDD,
	                                         JavaPairRDD<String, Row> sessionid2actionRDD) {
		//计算每天每小时session的数量,获取<yyyy-MM-dd_HH,aggrInfo>的格式
		JavaPairRDD<String, String> time2sessionidRDD = filteredSessionid2AggrInfoRDD.mapToPair(new PairFunction<Tuple2<String, String>, String, String>() {
			@Override
			public Tuple2<String, String> call(Tuple2<String, String> tuple) throws Exception {
				String aggrInfo = tuple._2;
				String startTime = StringUtils.getFieldFromConcatString(aggrInfo, "\\|", Constants.FIELD_START_TIME);
				String dateHour = DateUtils.getDateHour(startTime);
				return new Tuple2<String, String>(dateHour, aggrInfo);
			}
		});

		//获取每天每小时的session数量
		Map<String, Object> countMap = time2sessionidRDD.countByKey();

		//按时间比列随机抽取算法,计算每天每小时要抽取session的索引
		//将<yyyy-MM-dd_HH,count>格式的map,转换成<yyyy-MM-dd,<HH,count>>的格式
		Map<String, Map<String, Long>> dateHourCountMap = new HashMap<String, Map<String, Long>>();
		for (Map.Entry<String, Object> countEntry : countMap.entrySet()) {
			String dateHour = countEntry.getKey();
			String date = dateHour.split("_")[0];
			String hour = dateHour.split("_")[1];
			long count = Long.valueOf(String.valueOf(countEntry.getValue()));

			Map<String, Long> hourCountMap = dateHourCountMap.get(date);
			if (hourCountMap == null) {
				hourCountMap = new HashMap<String, Long>();
				dateHourCountMap.put(date, hourCountMap);
			}
			hourCountMap.put(hour, count);
		}

		//开始实现按时间比例随机抽取算法
		//总共有100个session先按照天数,进行平分
		int extractNumberPerDay = 100 / dateHourCountMap.size();

		//<date,<hour,(1,3,4)>>,日期,小时,要抽取session的索引
		Map<String, Map<String, List<Integer>>> dateHourExtractMap = new HashMap<String, Map<String, List<Integer>>>();

		Random random = new Random(System.currentTimeMillis());

		for (Map.Entry<String, Map<String, Long>> dateHourCountEntry : dateHourCountMap.entrySet()) {
			String date = dateHourCountEntry.getKey();
			Map<String, Long> hourCountMap = dateHourCountEntry.getValue();

			//计算出这一天的session总数
			long sessionCount = 0;
			for (long hourCount : hourCountMap.values()) {
				sessionCount += hourCount;
			}

			Map<String, List<Integer>> hourExtractMap = dateHourExtractMap.get(date);
			if (hourExtractMap == null) {
				hourExtractMap = new HashMap<String, List<Integer>>();
				dateHourExtractMap.put(date, hourExtractMap);
			}

			//遍历每一个小时
			for (Map.Entry<String, Long> hourCountEntry : hourCountMap.entrySet()) {
				String hour = hourCountEntry.getKey();
				long count = hourCountEntry.getValue();

				//计算每一个小时session的数量,占据当天总的session数量的比例,直接乘以每天要抽取的数量,就可以计算出每小时需要抽取的session数量
				int hourExtractNumber = (int) (((double) count / (double) sessionCount) * extractNumberPerDay);
				if (hourExtractNumber > count) {
					hourExtractNumber = (int) count;
				}

				//先获取当前小时的存放随机数的list
				List<Integer> extractIndexList = hourExtractMap.get(hour);
				if (extractIndexList == null) {
					extractIndexList = new ArrayList<Integer>();
					hourExtractMap.put(hour, extractIndexList);
				}

				//生成上面计算出来的随机数
				for (int i = 0; i < hourExtractNumber; i++) {
					int extractIndex = random.nextInt((int) count);
					while (extractIndexList.contains(extractIndex)) {
						extractIndex = random.nextInt((int) count);
					}
					extractIndexList.add(extractIndex);
				}
			}
		}

		//广播大变量
		final Broadcast<Map<String, Map<String, List<Integer>>>> dateHourExtractMapBroadcast = jssc.broadcast(dateHourExtractMap);

		//遍历每小时的session,然后根据随机索引进行抽取
		//执行groupByKey算子,得到<dateHour,(session,aggrInfo)>
		JavaPairRDD<String, Iterable<String>> time2sessionsRDD = time2sessionidRDD.groupByKey();

		//用flatMap算子,遍历所有的<dateHour,(session,aggrInfo)> 格式的数据
		//然后遍历每天每小时的session
		//如果发现某个session恰巧在我们制定的这天这小时随机抽取索引中,则直接写入MYSQL的random_extract_session表
		//将抽取的sessionid返回,形成一个新的JavaPairRDD<String,String>
		//最后,用抽取出来的JavaPairRDD,去join他们的访问行为明细数据,写入session表
		JavaPairRDD<String, String> extractSessionidsRDD = time2sessionsRDD.flatMapToPair(
				new PairFlatMapFunction<Tuple2<String, Iterable<String>>, String, String>() {
					@Override
					public Iterable<Tuple2<String, String>> call(Tuple2<String, Iterable<String>> tuple) throws Exception {
						List<Tuple2<String, String>> extractSessionids = new ArrayList<Tuple2<String, String>>();

						String dateHour = tuple._1;
						String date = dateHour.split("_")[0];
						String hour = dateHour.split("_")[1];
						Iterator<String> iterator = tuple._2.iterator();

						//通过广播变量获取值
						Map<String, Map<String, List<Integer>>> dateHourExtractMap = dateHourExtractMapBroadcast.getValue();

						List<Integer> extractIndexList = dateHourExtractMap.get(date).get(hour);

						ISessionRandomExtractDAO iSessionRandomExtractDAO = DAOFactory.getISessionRandomExtractDAO();

						int index = 0;
						while (iterator.hasNext()) {
							String sessionAggrInfo = iterator.next();
							if (extractIndexList.contains(index)) {
								String sessionid = StringUtils.getFieldFromConcatString(sessionAggrInfo, "\\|", Constants.FIELD_SESSION_ID);

								//将数据写入MYSQL
								SessionRandomExtract sessionRandomExtract = new SessionRandomExtract();
								sessionRandomExtract.setTaskid(taskid);
								sessionRandomExtract.setSessionid(sessionid);
								sessionRandomExtract.setStartTime(StringUtils.getFieldFromConcatString(
										sessionAggrInfo, "\\|", Constants.FIELD_START_TIME));
								sessionRandomExtract.setSearchKeywords(StringUtils.getFieldFromConcatString(
										sessionAggrInfo, "\\|", Constants.FIELD_SEARCH_KEYWORDS));
								sessionRandomExtract.setClickCategoryIds(StringUtils.getFieldFromConcatString(
										sessionAggrInfo, "\\|", Constants.FIELD_CLICK_CATEGORY_IDS));
								iSessionRandomExtractDAO.insert(sessionRandomExtract);

								//将sessionid返回
								extractSessionids.add(new Tuple2<String, String>(sessionid, sessionid));
							}
							index++;
						}
						return extractSessionids;
					}
				});

		//获取抽取出来的session的明细数据
		JavaPairRDD<String, Tuple2<String, Row>> extractSessionDetailRDD = extractSessionidsRDD.join(sessionid2actionRDD);

		//遍历每条row,并写入mqsql
		extractSessionDetailRDD.foreach(new VoidFunction<Tuple2<String, Tuple2<String, Row>>>() {
			@Override
			public void call(Tuple2<String, Tuple2<String, Row>> tuple) throws Exception {
				Row row = tuple._2._2;

				SessionDetail sessionDetail = new SessionDetail();
				sessionDetail.setTaskid(taskid);
				sessionDetail.setUserid(row.getLong(1));
				sessionDetail.setSessionid(row.getString(2));
				sessionDetail.setPageid(row.getLong(3));
				sessionDetail.setActionTime(row.getString(4));
				sessionDetail.setSearchKeyword(row.getString(5));
				sessionDetail.setClickCategoryId(row.getLong(6));
				sessionDetail.setClickProductId(row.getLong(7));
				sessionDetail.setOrderCategoryIds(row.getString(8));
				sessionDetail.setOrderProductIds(row.getString(9));
				sessionDetail.setPayCategoryIds(row.getString(10));
				sessionDetail.setPayProductIds(row.getString(11));

				ISessionDetailDAO iSessionDetailDAO = DAOFactory.getISessionDetailDAO();
				iSessionDetailDAO.insert(sessionDetail);
			}
		});

	}

	/**
	 * Computes the fraction of sessions falling into each visit-length and
	 * step-length bucket and persists one SessionAggrStat row to MySQL.
	 *
	 * @param value  the accumulator result string in "key=value|key=value" format
	 * @param taskid the id of the task the statistics belong to
	 */
	private static void calculateAndPersistAggrStat(String value, long taskid) {
		// Total number of sessions that survived filtering.
		long session_count = extractLongField(value, Constants.SESSION_COUNT);

		// Visit-length bucket counts.
		long visit_length_1s_3s = extractLongField(value, Constants.TIME_PERIOD_1s_3s);
		long visit_length_4s_6s = extractLongField(value, Constants.TIME_PERIOD_4s_6s);
		long visit_length_7s_9s = extractLongField(value, Constants.TIME_PERIOD_7s_9s);
		long visit_length_10s_30s = extractLongField(value, Constants.TIME_PERIOD_10s_30s);
		long visit_length_30s_60s = extractLongField(value, Constants.TIME_PERIOD_30s_60s);
		long visit_length_1m_3m = extractLongField(value, Constants.TIME_PERIOD_1m_3m);
		long visit_length_3m_10m = extractLongField(value, Constants.TIME_PERIOD_3m_10m);
		long visit_length_10m_30m = extractLongField(value, Constants.TIME_PERIOD_10m_30m);
		long visit_length_30m = extractLongField(value, Constants.TIME_PERIOD_30m);

		// Step-length bucket counts.
		long step_length_1_3 = extractLongField(value, Constants.STEP_PERIOD_1_3);
		long step_length_4_6 = extractLongField(value, Constants.STEP_PERIOD_4_6);
		long step_length_7_9 = extractLongField(value, Constants.STEP_PERIOD_7_9);
		long step_length_10_30 = extractLongField(value, Constants.STEP_PERIOD_10_30);
		long step_length_30_60 = extractLongField(value, Constants.STEP_PERIOD_30_60);
		long step_length_60 = extractLongField(value, Constants.STEP_PERIOD_60);

		// Build the domain object; every ratio is bucket count / total sessions.
		SessionAggrStat sessionAggrStat = new SessionAggrStat();
		sessionAggrStat.setTaskid(taskid);
		sessionAggrStat.setSession_count(session_count);
		sessionAggrStat.setVisit_length_1s_3s_ratio(bucketRatio(visit_length_1s_3s, session_count));
		sessionAggrStat.setVisit_length_4s_6s_ratio(bucketRatio(visit_length_4s_6s, session_count));
		sessionAggrStat.setVisit_length_7s_9s_ratio(bucketRatio(visit_length_7s_9s, session_count));
		sessionAggrStat.setVisit_length_10s_30s_ratio(bucketRatio(visit_length_10s_30s, session_count));
		sessionAggrStat.setVisit_length_30s_60s_ratio(bucketRatio(visit_length_30s_60s, session_count));
		sessionAggrStat.setVisit_length_1m_3m_ratio(bucketRatio(visit_length_1m_3m, session_count));
		sessionAggrStat.setVisit_length_3m_10m_ratio(bucketRatio(visit_length_3m_10m, session_count));
		sessionAggrStat.setVisit_length_10m_30m_ratio(bucketRatio(visit_length_10m_30m, session_count));
		sessionAggrStat.setVisit_length_30m_ratio(bucketRatio(visit_length_30m, session_count));
		sessionAggrStat.setStep_length_1_3_ratio(bucketRatio(step_length_1_3, session_count));
		sessionAggrStat.setStep_length_4_6_ratio(bucketRatio(step_length_4_6, session_count));
		sessionAggrStat.setStep_length_7_9_ratio(bucketRatio(step_length_7_9, session_count));
		sessionAggrStat.setStep_length_10_30_ratio(bucketRatio(step_length_10_30, session_count));
		sessionAggrStat.setStep_length_30_60_ratio(bucketRatio(step_length_30_60, session_count));
		sessionAggrStat.setStep_length_60_ratio(bucketRatio(step_length_60, session_count));

		// Persist to MySQL.
		ISessionAggrStatDAO sessionAggrStatDAO = DAOFactory.getSessionAggrStatDAO();
		sessionAggrStatDAO.insert(sessionAggrStat);
	}

	/**
	 * Extracts a single long-valued field from a "key=value|key=value" string.
	 *
	 * @param concatString the accumulator string
	 * @param field        the field name to extract
	 * @return the field's value parsed as a long
	 */
	private static long extractLongField(String concatString, String field) {
		return Long.parseLong(StringUtils.getFieldFromConcatString(concatString, "\\|", field));
	}

	/**
	 * Returns count / total rounded to two decimals. Returns 0.0 when total is
	 * zero so a run with no matching sessions does not persist NaN ratios.
	 *
	 * @param count bucket count
	 * @param total total session count
	 * @return the ratio, or 0.0 when total is zero
	 */
	private static double bucketRatio(long count, long total) {
		if (total == 0L) {
			return 0.0;
		}
		return NumberUtils.formatDouble((double) count / (double) total, 2);
	}


	/**
	 * Filters the session-granularity data by the task parameters and, for each
	 * session that passes, counts it and accumulates its visit-length and
	 * step-length bucket in the accumulator.
	 *
	 * @param sessionid2AggrInfoRDD      <sessionid, aggrInfo> session aggregates
	 * @param taskParam                  task filter parameters (age range,
	 *                                   professionals, cities, sex, keywords,
	 *                                   category ids)
	 * @param sessionAggrStatAccumulator accumulator collecting bucket counts
	 * @return the filtered <sessionid, aggrInfo> RDD
	 */
	private static JavaPairRDD<String, String> filterSessionAndStat(JavaPairRDD<String, String> sessionid2AggrInfoRDD,
	                                                                final JSONObject taskParam,
	                                                                final Accumulator<String> sessionAggrStatAccumulator) {
		// Concatenate the filter criteria into a single "key=value|..." string.
		String startAge = ParamUtils.getParam(taskParam, Constants.PARAM_START_AGE);
		String endAge = ParamUtils.getParam(taskParam, Constants.PARAM_END_AGE);
		String professionals = ParamUtils.getParam(taskParam, Constants.PARAM_PROFESSIONALS);
		String cities = ParamUtils.getParam(taskParam, Constants.PARAM_CITIES);
		String sex = ParamUtils.getParam(taskParam, Constants.PARAM_SEX);
		String keywords = ParamUtils.getParam(taskParam, Constants.PARAM_KEYWORDS);
		String categoryIds = ParamUtils.getParam(taskParam, Constants.PARAM_CATEGORY_IDS);

		String _parameter = (startAge != null ? Constants.PARAM_START_AGE + "=" + startAge + "|" : "")
				+ (endAge != null ? Constants.PARAM_END_AGE + "=" + endAge + "|" : "")
				+ (professionals != null ? Constants.PARAM_PROFESSIONALS + "=" + professionals + "|" : "")
				+ (cities != null ? Constants.PARAM_CITIES + "=" + cities + "|" : "")
				+ (sex != null ? Constants.PARAM_SEX + "=" + sex + "|" : "")
				+ (keywords != null ? Constants.PARAM_KEYWORDS + "=" + keywords + "|" : "")
				+ (categoryIds != null ? Constants.PARAM_CATEGORY_IDS + "=" + categoryIds : "");
		// Strip a trailing "|". String.endsWith takes a literal suffix, not a
		// regex, so the pipe must not be escaped here (the previous "\\|"
		// argument never matched anything).
		if (_parameter.endsWith("|")) {
			_parameter = _parameter.substring(0, _parameter.length() - 1);
		}

		final String parameter = _parameter;

		// Filter every session against the criteria.
		JavaPairRDD<String, String> filteredSessionid2AggrInfoRDD = sessionid2AggrInfoRDD.filter(new Function<Tuple2<String, String>, Boolean>() {
			@Override
			public Boolean call(Tuple2<String, String> tuple) throws Exception {
				// Session-granularity aggregate info for this session.
				String aggrInfo = tuple._2;
				// Filter by age range (startAge, endAge). The previous "!!"
				// double negation inverted this check and rejected exactly the
				// sessions whose age was inside the requested range.
				if (!ValidUtils.between(aggrInfo, Constants.FIELD_AGE, parameter, Constants.PARAM_START_AGE,
						Constants.PARAM_END_AGE)) {
					return false;
				}
				// Filter by professional (professionals).
				if (!ValidUtils.in(aggrInfo, Constants.FIELD_PROFESSIONAL, parameter, Constants.PARAM_PROFESSIONALS)) {
					return false;
				}
				// Filter by city (cities).
				if (!ValidUtils.in(aggrInfo, Constants.FIELD_CITY, parameter, Constants.PARAM_CITIES)) {
					return false;
				}
				// Filter by sex.
				if (!ValidUtils.equal(aggrInfo, Constants.FIELD_SEX, parameter, Constants.PARAM_SEX)) {
					return false;
				}
				// Filter by search keywords: e.g. the session may have searched
				// for "hotpot, cake, barbecue" while the criteria ask for
				// "hotpot, skewers, iphone" — ValidUtils.in passes the session
				// when ANY of its keywords matches ANY requested keyword.
				if (!ValidUtils.in(aggrInfo, Constants.FIELD_SEARCH_KEYWORDS, parameter, Constants.PARAM_KEYWORDS)) {
					return false;
				}
				// Filter by clicked category ids.
				if (!ValidUtils.in(aggrInfo, Constants.FIELD_CATEGORY_ID, parameter, Constants.PARAM_CATEGORY_IDS)) {
					return false;
				}

				// Reaching here means the session matched every criterion:
				// count it and accumulate its buckets.
				sessionAggrStatAccumulator.add(Constants.SESSION_COUNT);
				// Extract the session's visit length and step length and add it
				// to the matching accumulator bucket.
				long visitLength = Long.valueOf(StringUtils.getFieldFromConcatString
						(aggrInfo, "\\|", Constants.FIELD_VISIT_LENGTH));
				long stepLength = Long.valueOf(StringUtils.getFieldFromConcatString
						(aggrInfo, "\\|", Constants.FIELD_STEP_LENGTH));
				calculateVisitLength(visitLength);
				calculateStepLength(stepLength);
				return true;
			}

			/**
			 * Adds the session to the visit-length bucket it falls into.
			 *
			 * @param visitLength session visit length in seconds
			 */
			private void calculateVisitLength(long visitLength) {
				if (visitLength >= 1 && visitLength <= 3) {
					sessionAggrStatAccumulator.add(Constants.TIME_PERIOD_1s_3s);
				} else if (visitLength >= 4 && visitLength <= 6) {
					sessionAggrStatAccumulator.add(Constants.TIME_PERIOD_4s_6s);
				} else if (visitLength >= 7 && visitLength <= 9) {
					sessionAggrStatAccumulator.add(Constants.TIME_PERIOD_7s_9s);
				} else if (visitLength >= 10 && visitLength <= 30) {
					sessionAggrStatAccumulator.add(Constants.TIME_PERIOD_10s_30s);
				} else if (visitLength > 30 && visitLength <= 60) {
					sessionAggrStatAccumulator.add(Constants.TIME_PERIOD_30s_60s);
				} else if (visitLength > 60 && visitLength <= 180) {
					sessionAggrStatAccumulator.add(Constants.TIME_PERIOD_1m_3m);
				} else if (visitLength > 180 && visitLength <= 600) {
					sessionAggrStatAccumulator.add(Constants.TIME_PERIOD_3m_10m);
				} else if (visitLength > 600 && visitLength <= 1800) {
					sessionAggrStatAccumulator.add(Constants.TIME_PERIOD_10m_30m);
				} else if (visitLength > 1800) {
					sessionAggrStatAccumulator.add(Constants.TIME_PERIOD_30m);
				}
			}

			/**
			 * Adds the session to the step-length bucket it falls into.
			 *
			 * @param stepLength number of actions in the session
			 */
			private void calculateStepLength(long stepLength) {
				if (stepLength >= 1 && stepLength <= 3) {
					sessionAggrStatAccumulator.add(Constants.STEP_PERIOD_1_3);
				} else if (stepLength >= 4 && stepLength <= 6) {
					sessionAggrStatAccumulator.add(Constants.STEP_PERIOD_4_6);
				} else if (stepLength >= 7 && stepLength <= 9) {
					sessionAggrStatAccumulator.add(Constants.STEP_PERIOD_7_9);
				} else if (stepLength >= 10 && stepLength <= 30) {
					sessionAggrStatAccumulator.add(Constants.STEP_PERIOD_10_30);
				} else if (stepLength > 30 && stepLength <= 60) {
					sessionAggrStatAccumulator.add(Constants.STEP_PERIOD_30_60);
				} else if (stepLength > 60) {
					sessionAggrStatAccumulator.add(Constants.STEP_PERIOD_60);
				}
			}

		});

		return filteredSessionid2AggrInfoRDD;
	}


	/**
	 * Aggregates the raw action data to session granularity and joins the
	 * result with the user_info table.
	 *
	 * @param jssc                the JavaSparkContext; unused by the plain
	 *                            reduce-side join used here, kept for signature
	 *                            compatibility (needed if the join is tuned into
	 *                            a broadcast map-side join)
	 * @param sqlContext          context used to query the user_info table
	 * @param sessionid2actionRDD action rows keyed by sessionid
	 * @return RDD of <sessionid, fullAggrInfo> where fullAggrInfo is a
	 *         "key=value|key=value" string containing session and user fields
	 */
	private static JavaPairRDD<String, String> aggregateBySession(JavaSparkContext jssc, SQLContext sqlContext, JavaPairRDD<String, Row> sessionid2actionRDD) {
		// Group the action data by sessionid.
		JavaPairRDD<String, Iterable<Row>> sessionid2ActionsRDD = sessionid2actionRDD.groupByKey();
		// For every session, concatenate its search keywords and clicked
		// category ids and compute visit length / step length.
		// Result: <userid, partAggrInfo(sessionid,searchKeywords,clickCategoryIds,...)>
		JavaPairRDD<Long, String> userid2PartAggrInfoRDD = sessionid2ActionsRDD.mapToPair(
				new PairFunction<Tuple2<String, Iterable<Row>>, Long, String>() {
					@Override
					public Tuple2<Long, String> call(Tuple2<String, Iterable<Row>> tuple) throws Exception {
						String sessionid = tuple._1;
						Iterator<Row> iterator = tuple._2.iterator();

						// Method-local buffers: StringBuilder suffices, the
						// synchronization of StringBuffer is pure overhead here.
						StringBuilder searchKeyWordsBuffer = new StringBuilder();
						StringBuilder clickCategoryIdsBuffer = new StringBuilder();

						// Session start and end time.
						Date startTime = null;
						Date endTime = null;
						// Session step length (number of actions).
						int stepLength = 0;

						Long userId = null;
						while (iterator.hasNext()) {
							// Extract the search keyword and clicked category of
							// each action row.
							Row row = iterator.next();
							if (userId == null) {
								userId = row.getLong(1);
							}
							// Append only non-empty values not already collected
							// for this session (dedup by substring check).
							String searchKeyWord = row.getString(5);
							if (StringUtils.isNotEmpty(searchKeyWord) &&
									!searchKeyWordsBuffer.toString().contains(searchKeyWord)) {
								searchKeyWordsBuffer.append(searchKeyWord).append(",");
							}
							// NOTE: the primitive getter getLong(6) can never
							// produce a null after boxing, so null rows must be
							// detected with isNullAt before reading the value.
							if (!row.isNullAt(6)) {
								long clickCategoryId = row.getLong(6);
								if (!clickCategoryIdsBuffer.toString().contains(String.valueOf(clickCategoryId))) {
									clickCategoryIdsBuffer.append(clickCategoryId).append(",");
								}
							}

							// Track the earliest and latest action time of the session.
							Date actionTime = DateUtils.parseTime(row.getString(4));
							if (startTime == null || actionTime.before(startTime)) {
								startTime = actionTime;
							}
							if (endTime == null || actionTime.after(endTime)) {
								endTime = actionTime;
							}
							// Each action contributes one step.
							stepLength++;
						}
						// Strip the trailing commas.
						String searchKeyWords = StringUtils.trimComma(searchKeyWordsBuffer.toString());
						String clickCategoryIds = StringUtils.trimComma(clickCategoryIdsBuffer.toString());

						// Session visit length in seconds.
						long visitLength = (endTime.getTime() - startTime.getTime()) / 1000;

						// Emit <userid, partAggrInfo> in "key=value|key=value" format.
						String partAggrInfo = Constants.FIELD_SESSION_ID + "=" + sessionid + "|"
								+ Constants.FIELD_SEARCH_KEYWORDS + "=" + searchKeyWords + "|"
								+ Constants.FIELD_CLICK_CATEGORY_IDS + "=" + clickCategoryIds + "|"
								+ Constants.FIELD_VISIT_LENGTH + "=" + visitLength + "|"
								+ Constants.FIELD_STEP_LENGTH + "=" + stepLength + "|"
								+ Constants.FIELD_START_TIME + "=" + DateUtils.formatTime(startTime);

						return new Tuple2<Long, String>(userId, partAggrInfo);
					}
				});
		// Load all user info rows as <userid, Row>.
		String sql = "select * from user_info";
		JavaRDD<Row> userInfoRDD = sqlContext.sql(sql).javaRDD();
		JavaPairRDD<Long, Row> userid2InfoRDD = userInfoRDD.mapToPair(new PairFunction<Row, Long, Row>() {
			@Override
			public Tuple2<Long, Row> call(Row row) throws Exception {
				return new Tuple2<Long, Row>(row.getLong(0), row);
			}
		});
		/*
		 * NOTE(tuning): the session side of this join may be far larger than
		 * the user side (e.g. 10M sessions vs 100K users). If the plain
		 * reduce-side join below becomes a bottleneck or suffers from data
		 * skew, the usual remedies are: (1) collect + broadcast the user table
		 * and do a map-side join, (2) sample to locate the skewed userid and
		 * join it separately, or (3) salt the session keys with a random
		 * prefix and expand the user table accordingly. The straightforward
		 * join is kept here for clarity.
		 */
		// Join the session aggregates with the user info.
		JavaPairRDD<Long, Tuple2<String, Row>> userid2FullInfoRDD = userid2PartAggrInfoRDD.join(userid2InfoRDD);
		// Stitch the user fields onto the session aggregate and re-key the
		// result by sessionid: <sessionid, fullAggrInfo>.
		JavaPairRDD<String, String> sessionid2FullAggrInfoRDD = userid2FullInfoRDD.mapToPair(new PairFunction<Tuple2<Long, Tuple2<String, Row>>, String, String>() {
			@Override
			public Tuple2<String, String> call(Tuple2<Long, Tuple2<String, Row>> tuple) throws Exception {
				String partAggrInfo = tuple._2._1;
				Row userInfoRow = tuple._2._2;
				String sessionid = StringUtils.getFieldFromConcatString(partAggrInfo, "\\|", Constants.FIELD_SESSION_ID);
				int age = userInfoRow.getInt(3);
				String professional = userInfoRow.getString(4);
				String city = userInfoRow.getString(5);
				String sex = userInfoRow.getString(6);

				String fullAggrInfo = partAggrInfo + "|"
						+ Constants.FIELD_AGE + "=" + age + "|"
						+ Constants.FIELD_PROFESSIONAL + "=" + professional + "|"
						+ Constants.FIELD_CITY + "=" + city + "|"
						+ Constants.FIELD_SEX + "=" + sex;
				return new Tuple2<String, String>(sessionid, fullAggrInfo);
			}
		});
		return sessionid2FullAggrInfoRDD;
	}

	/**
	 * Queries the user_visit_action table for the date range configured in the
	 * task parameters.
	 *
	 * @param sqlContext sqlContext
	 * @param taskParam  task parameters containing start_date / end_date
	 * @return action rows with date in [startDate, endDate]
	 * @throws IllegalArgumentException if either date is missing or not in
	 *                                  yyyy-MM-dd format
	 */
	private static JavaRDD<Row> getActionRDDByDateRange(SQLContext sqlContext, JSONObject taskParam) {
		String startDate = ParamUtils.getParam(taskParam, Constants.PARAM_START_DATE);
		String endDate = ParamUtils.getParam(taskParam, Constants.PARAM_END_DATE);
		// The dates are interpolated directly into the SQL text (SQLContext has
		// no prepared statements), so validate them strictly to rule out SQL
		// injection through externally supplied task parameters.
		validateDateParam(startDate, Constants.PARAM_START_DATE);
		validateDateParam(endDate, Constants.PARAM_END_DATE);
		String sql = "select * from user_visit_action where date >= '" + startDate + "' and date <= '" + endDate + "'";
		return sqlContext.sql(sql).javaRDD();
	}

	/**
	 * Ensures a task-supplied date parameter is present and shaped like
	 * yyyy-MM-dd before it is embedded into SQL.
	 *
	 * @param date      the raw parameter value
	 * @param paramName the parameter name, for the error message
	 * @throws IllegalArgumentException if the value is null or malformed
	 */
	private static void validateDateParam(String date, String paramName) {
		if (date == null || !date.matches("\\d{4}-\\d{2}-\\d{2}")) {
			throw new IllegalArgumentException("invalid date parameter " + paramName + ": " + date);
		}
	}


	/**
	 * Returns the SQL context appropriate for the current run mode: a plain
	 * SQLContext when the spark.local flag is set (local testing), otherwise a
	 * HiveContext for the production cluster.
	 *
	 * @param sc the underlying SparkContext
	 * @return a SQLContext (local mode) or HiveContext (cluster mode)
	 */
	public static SQLContext getSQLContext(SparkContext sc) {
		boolean local = Boolean.valueOf(ConfigurationManager.getProperty(Constants.SPARK_LOCAL));
		return local ? new SQLContext(sc) : new HiveContext(sc);
	}

	/**
	 * Generates mock data for local testing. Does nothing unless the
	 * spark.local flag is set — production runs read real tables instead.
	 *
	 * @param sc         the Java Spark context
	 * @param sqlContext context the mock tables are registered into
	 */
	public static void mockData(JavaSparkContext sc, SQLContext sqlContext) {
		boolean local = Boolean.valueOf(ConfigurationManager.getProperty(Constants.SPARK_LOCAL));
		if (!local) {
			return;
		}
		MockData.mock(sc, sqlContext);
	}
}
