package spark.session;

import com.alibaba.fastjson.JSONObject;

import conf.ConfigurationManager;
import constant.Constants;
import dao.*;
import dao.factory.DAOFactory;
import domain.*;
import test.MockData;
import util.*;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.storage.StorageLevel;

import scala.Tuple2;

import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;

/**
 * 用户访问session分析Spark作业
 * <p>
 * 接收用户创建的分析任务，用户可能指定的条件如下：
 * <p>
 * 1、时间范围：起始日期~结束日期
 * 2、性别：男或女
 * 3、年龄范围
 * 4、职业：多选
 * 5、城市：多选
 * 6、搜索词：多个搜索词，只要某个session中的任何一个action搜索过指定的关键词，那么session就符合条件
 * 7、点击品类：多个品类，只要某个session中的任何一个action点击过某个品类，那么session就符合条件
 * <p>
 * 我们的spark作业如何接受用户创建的任务？
 * <p>
 * J2EE平台在接收用户创建任务的请求之后，会将任务信息插入MySQL的task表中，任务参数以JSON格式封装在task_param
 * 字段中
 * <p>
 * 接着J2EE平台会执行我们的spark-submit shell脚本，并将taskid作为参数传递给spark-submit shell脚本
 * spark-submit shell脚本，在执行时，是可以接收参数的，并且会将接收的参数，传递给Spark作业的main函数
 * 参数就封装在main函数的args数组中
 * <p>
 * 这是spark本身提供的特性
 *
 * @author Administrator
 */
public class UserVisitSessionAnalyzeSpark {

    /**
     * Job entry point.
     *
     * <p>Loads the analysis task, reads the user-visit actions in the task's
     * date range, aggregates them per session, filters the sessions by the
     * task's criteria while bucketing visit/step lengths in an accumulator,
     * and finally persists the bucket ratios through the DAO layer.
     *
     * @param args optional; {@code args[0]} may carry the task id forwarded by
     *             the spark-submit shell script (defaults to 1, preserving the
     *             previously hard-coded behaviour for local runs)
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
                .setAppName("UserVisitSessionAnalyzeSpark")
                .setMaster("local[2]");
        JavaSparkContext sc = new JavaSparkContext(conf);
        SparkSession sparkSession = SparkSession.builder()
                .enableHiveSupport()
                .config(conf)
                .getOrCreate();
        try {
            MockData.mockData(sparkSession);

            // The J2EE platform passes the task id through spark-submit; the
            // old code hard-coded task 1, which is kept here as the fallback.
            int taskid = args.length > 0 ? Integer.parseInt(args[0]) : 1;
            ITaskDAO taskDAO = DAOFactory.getTaskDAO();
            Task task = taskDAO.findById(taskid);
            if (task == null) {
                // Include the id so the failure is diagnosable.
                System.out.println("cannot find this task with id " + taskid);
                return;
            }

            JSONObject taskParam = JSONObject.parseObject(task.getTaskParam());

            JavaRDD<Row> actionJavaRDD = getActionRDDByDateRange(sparkSession, taskParam);
            JavaPairRDD<String, Row> sessionid2actionRDD = getSessionid2actionRDD(actionJavaRDD);
            // Reused by the aggregation below; keep it in memory instead of
            // recomputing the source scan.
            sessionid2actionRDD = sessionid2actionRDD.persist(StorageLevel.MEMORY_ONLY());

            JavaPairRDD<String, String> sessionid2fullAggrInfoRDD =
                    aggregateBySession(sparkSession, sessionid2actionRDD);

            SessionAggrStatAccumulator sessionAggrStatAccumulator = new SessionAggrStatAccumulator();
            sc.sc().register(sessionAggrStatAccumulator, "sessionAggrStatAccumulator");

            JavaPairRDD<String, String> filteredSessionid2fullInfoRDD = filterSessionAndAggrStat(
                    sessionid2fullAggrInfoRDD, taskParam, sessionAggrStatAccumulator);

            // Persist + one action: the accumulator is only populated when the
            // filter actually executes (lazy evaluation), and persisting avoids
            // double counting if the RDD is ever evaluated again.
            filteredSessionid2fullInfoRDD =
                    filteredSessionid2fullInfoRDD.persist(StorageLevel.MEMORY_ONLY());
            filteredSessionid2fullInfoRDD.count();

            // Accumulator values are reliable on the driver only after an action.
            calculateAndPersistAggrStat(sessionAggrStatAccumulator.value());
        } finally {
            sc.close();  // release the SparkContext even if the job fails
        }
    }

    /**
     * Computes, for every visit-length / step-length bucket, the fraction of
     * filtered sessions that fell into it, and writes one SessionAggrStat row
     * through the DAO. Runs entirely on the driver.
     *
     * @param map snapshot of the accumulator; keys are the bucket names from
     *            {@link Constants}, values are counts (missing keys mean 0)
     */
    public static void calculateAndPersistAggrStat(Map<String, Integer> map) {
        // NOTE(review): the original code stores a formatted timestamp in the
        // task-id column; kept for compatibility, but persisting the real task
        // id would be more useful — confirm with the schema owner.
        String taskid = DateUtils.formatTime(new Date());

        // getOrDefault avoids the NullPointerException that unboxing a missing
        // map.get(...) entry would throw for buckets that never got a session.
        double sessionCount = map.getOrDefault(Constants.SESSION_COUNT, 0);

        SessionAggrStat sessionAggrStat = new SessionAggrStat();
        sessionAggrStat.setTaskid(taskid);
        sessionAggrStat.setSession_count(sessionCount);
        sessionAggrStat.setVisit_length_1s_20s_ratio(ratio(map, Constants.TIME_PERIOD_1s_20s, sessionCount));
        sessionAggrStat.setVisit_length_21s_40s_ratio(ratio(map, Constants.TIME_PERIOD_21s_40s, sessionCount));
        sessionAggrStat.setVisit_length_41s_60s_ratio(ratio(map, Constants.TIME_PERIOD_41s_60s, sessionCount));
        sessionAggrStat.setVisit_length_61s_80s_ratio(ratio(map, Constants.TIME_PERIOD_61s_80s, sessionCount));
        sessionAggrStat.setVisit_length_81s_100s_ratio(ratio(map, Constants.TIME_PERIOD_81s_100s, sessionCount));
        sessionAggrStat.setVisit_length_101s_120s_ratio(ratio(map, Constants.TIME_PERIOD_101s_120s, sessionCount));
        sessionAggrStat.setStep_length_1_2_ratio(ratio(map, Constants.STEP_PERIOD_1_2, sessionCount));
        sessionAggrStat.setStep_length_3_4_ratio(ratio(map, Constants.STEP_PERIOD_3_4, sessionCount));
        sessionAggrStat.setStep_length_5_6_ratio(ratio(map, Constants.STEP_PERIOD_5_6, sessionCount));
        sessionAggrStat.setStep_length_7_8_ratio(ratio(map, Constants.STEP_PERIOD_7_8, sessionCount));

        ISessionAggrStatDAO sessionAggrStatDAO = DAOFactory.getSessionAggrStatDAO();
        sessionAggrStatDAO.insert(sessionAggrStat);
    }

    /** Fraction of sessions in the given bucket; 0.0 (not NaN) when there are no sessions. */
    private static double ratio(Map<String, Integer> map, String bucket, double sessionCount) {
        if (sessionCount == 0) {
            return 0.0;
        }
        return map.getOrDefault(bucket, 0) / sessionCount;
    }

    /**
     * Keeps only the sessions matching the task's age range, cities and sex,
     * counting every surviving session (and its visit/step-length bucket) in
     * the accumulator as a side effect of the filter.
     *
     * @param sessionid2FullAggrInfoRDD  (sessionid, full aggregation string)
     * @param taskParam                  task parameters, e.g. {startDate:"2020-05-01",
     *                                   endDate:"2020-05-02", startAge:10, endAge:60,
     *                                   cities:"", sex:""}
     * @param sessionAggrStatAccumulator bucket counter registered on the driver
     * @return the filtered RDD (lazy; counting happens when an action runs)
     */
    public static JavaPairRDD<String, String> filterSessionAndAggrStat(
            JavaPairRDD<String, String> sessionid2FullAggrInfoRDD,
            final JSONObject taskParam,
            final SessionAggrStatAccumulator sessionAggrStatAccumulator) {
        String startAge = ParamUtils.getParam(taskParam, Constants.PARAM_START_AGE);
        String endAge = ParamUtils.getParam(taskParam, Constants.PARAM_END_AGE);
        String cities = ParamUtils.getParam(taskParam, Constants.PARAM_CITIES);
        String sex = ParamUtils.getParam(taskParam, Constants.PARAM_SEX);

        // Serialize the criteria into one string so the closure captures a
        // small final value instead of the whole JSON object.
        String parameter = Constants.PARAM_START_AGE + "=" + startAge + "|"
                + Constants.PARAM_END_AGE + "=" + endAge + "|"
                + Constants.PARAM_CITIES + "=" + cities + "|"
                + Constants.PARAM_SEX + "=" + sex;

        return sessionid2FullAggrInfoRDD.filter(tuple -> {
            String fullAggrInfo = tuple._2;
            // Reject sessions outside the requested age range / cities / sex.
            if (!ValidUtils.between(fullAggrInfo, Constants.FIELD_AGE,
                    parameter, Constants.PARAM_START_AGE, Constants.PARAM_END_AGE)) {
                return false;
            }
            if (!ValidUtils.in(fullAggrInfo, Constants.FIELD_CITY,
                    parameter, Constants.PARAM_CITIES)) {
                return false;
            }
            if (!ValidUtils.equal(fullAggrInfo, Constants.FIELD_SEX,
                    parameter, Constants.PARAM_SEX)) {
                return false;
            }

            // The session survived every filter: count it and its buckets.
            sessionAggrStatAccumulator.add(Constants.SESSION_COUNT);
            int visitLength = Integer.parseInt(StringUtils.getFieldFromConcatString(
                    fullAggrInfo, "\\|", Constants.FIELD_VISIT_LENGTH));
            int stepLength = Integer.parseInt(StringUtils.getFieldFromConcatString(
                    fullAggrInfo, "\\|", Constants.FIELD_STEP_LENGTH));
            countVisitLength(visitLength, sessionAggrStatAccumulator);
            countStepLength(stepLength, sessionAggrStatAccumulator);
            return true;
        });
    }

    /** Adds one count to the visit-length bucket (seconds) that visitLength falls into. */
    private static void countVisitLength(int visitLength, SessionAggrStatAccumulator accumulator) {
        if (visitLength >= 1 && visitLength <= 20) {
            accumulator.add(Constants.TIME_PERIOD_1s_20s);
        } else if (visitLength >= 21 && visitLength <= 40) {
            accumulator.add(Constants.TIME_PERIOD_21s_40s);
        } else if (visitLength >= 41 && visitLength <= 60) {
            accumulator.add(Constants.TIME_PERIOD_41s_60s);
        } else if (visitLength >= 61 && visitLength <= 80) {
            accumulator.add(Constants.TIME_PERIOD_61s_80s);
        } else if (visitLength >= 81 && visitLength <= 100) {
            accumulator.add(Constants.TIME_PERIOD_81s_100s);
        } else if (visitLength >= 101 && visitLength <= 120) {
            accumulator.add(Constants.TIME_PERIOD_101s_120s);
        } else {
            // Sessions outside 1s..120s land in a catch-all bucket.
            accumulator.add("other");
        }
    }

    /** Adds one count to the step-length bucket that stepLength falls into (>8 is uncounted, as before). */
    private static void countStepLength(int stepLength, SessionAggrStatAccumulator accumulator) {
        if (stepLength >= 1 && stepLength <= 2) {
            accumulator.add(Constants.STEP_PERIOD_1_2);
        } else if (stepLength >= 3 && stepLength <= 4) {
            accumulator.add(Constants.STEP_PERIOD_3_4);
        } else if (stepLength >= 5 && stepLength <= 6) {
            accumulator.add(Constants.STEP_PERIOD_5_6);
        } else if (stepLength >= 7 && stepLength <= 8) {
            accumulator.add(Constants.STEP_PERIOD_7_8);
        }
    }

    /**
     * Groups actions by session id, derives per-session aggregates (clicked
     * product ids, visit length in seconds, step length), joins them with the
     * user_info table and re-keys the result by session id.
     *
     * @return (sessionid, concat string "sessionid=..|clickProductIds=..|
     *         visitLength=..|stepLength=..|name=..|sex=..|age=..|")
     */
    private static JavaPairRDD<String, String> aggregateBySession(SparkSession sparkSession,
                                                                  JavaPairRDD<String, Row> sessionid2actionRDD) {
        JavaPairRDD<String, Iterable<Row>> sessionid2ActionsRDD = sessionid2actionRDD.groupByKey();
        JavaPairRDD<String, String> userid2partAggrInfoRDD = sessionid2ActionsRDD.mapToPair(tuple -> {
            String sessionid = tuple._1;
            // LinkedHashSet keeps first-click order and deduplicates on the
            // whole id. The old substring "contains" check wrongly treated
            // e.g. product "1" as a duplicate of an already-seen "12".
            LinkedHashSet<String> clickProductIds = new LinkedHashSet<>();
            Date startTime = null;
            Date endTime = null;
            int stepLength = 0;
            String userid = null;
            for (Row row : tuple._2) {
                // NOTE(review): columns 2/5 are assumed to hold user_id and
                // click_product_id — confirm against the mock-data schema.
                userid = row.getString(2);
                String clickProductId = row.getString(5);
                if (clickProductId != null) {
                    clickProductIds.add(clickProductId);
                }
                Date actionTime = DateUtils.parseTime(row.getAs("actionTime"));
                if (startTime == null || actionTime.before(startTime)) {
                    startTime = actionTime;
                }
                if (endTime == null || actionTime.after(endTime)) {
                    endTime = actionTime;
                }
                stepLength++;  // every action counts towards the step length
            }

            // groupByKey guarantees at least one row, so start/end are non-null.
            long visitLength = (endTime.getTime() - startTime.getTime()) / 1000;
            String partAggrInfo = Constants.FIELD_SESSION_ID + "=" + sessionid + "|"
                    + Constants.FIELD_CLICK_PRODUCT_IDS + "=" + String.join(",", clickProductIds) + "|"
                    + Constants.FIELD_VISIT_LENGTH + "=" + visitLength + "|"
                    + Constants.FIELD_STEP_LENGTH + "=" + stepLength + "|";
            return new Tuple2<>(userid, partAggrInfo);
        });

        JavaRDD<Row> userInfoRDD = sparkSession.sql("select * from user_info").javaRDD();
        JavaPairRDD<String, Row> userid2infoRDD =
                userInfoRDD.mapToPair(row -> new Tuple2<>(row.getString(0), row));
        JavaPairRDD<String, Tuple2<String, Row>> userid2fullInfoRDD =
                userid2partAggrInfoRDD.join(userid2infoRDD);

        return userid2fullInfoRDD.mapToPair(tuple -> {
            String partAggrInfo = tuple._2._1;
            Row userInfoRow = tuple._2._2;
            // The delimiter must be regex-escaped, otherwise split would treat
            // "|" as alternation and break on every character.
            String sessionid = StringUtils.getFieldFromConcatString(
                    partAggrInfo, "\\|", Constants.FIELD_SESSION_ID);

            String name = userInfoRow.getString(1);
            String sex = userInfoRow.getString(2);
            int age = userInfoRow.getInt(3);
            // partAggrInfo already ends with "|"; appending directly avoids the
            // empty "||" field the old concatenation produced.
            String fullInfo = partAggrInfo
                    + Constants.FIELD_NAME + "=" + name + "|"
                    + Constants.FIELD_SEX + "=" + sex + "|"
                    + Constants.FIELD_AGE + "=" + age + "|";
            return new Tuple2<>(sessionid, fullInfo);
        });
    }

    /** Re-keys the raw action rows by session id (column 3 of user_visit_action). */
    private static JavaPairRDD<String, Row> getSessionid2actionRDD(JavaRDD<Row> actionJavaRDD) {
        return actionJavaRDD.mapToPair(row -> new Tuple2<>(row.getString(3), row));
    }

    /**
     * Reads user_visit_action rows whose date lies within the task's
     * [startDate, endDate] range.
     *
     * <p>NOTE(review): the SQL is built by string concatenation; the dates come
     * from the task_param JSON written by the J2EE platform, so if that input
     * is ever user-controlled this is an injection risk — validate the dates or
     * use a parameterized query.
     */
    public static JavaRDD<Row> getActionRDDByDateRange(SparkSession sparkSession, JSONObject taskParam) {
        String startDate = ParamUtils.getParam(taskParam, Constants.PARAM_START_DATE);
        String endDate = ParamUtils.getParam(taskParam, Constants.PARAM_END_DATE);

        String sql = "select * "
                + "from user_visit_action "
                + "where date>='" + startDate + "' "
                + "and date<='" + endDate + "'";
        Dataset<Row> actionDS = sparkSession.sql(sql);
        return actionDS.toJavaRDD();
    }
}
