package com.stan.core.spark.page;

import com.alibaba.fastjson.JSONObject;
import com.stan.common.ConcatStrUtils;
import com.stan.common.DateUtils;
import com.stan.common.SparkUtils;
import com.stan.common.TaskUtils;
import com.stan.core.conf.ConfManager;
import com.stan.core.contants.Constants;
import com.stan.core.mapper.PageOneSkipConvertRateMapper;
import com.stan.core.mapper.TaskMapper;
import com.stan.core.mapper.factory.MapperFactory;
import com.stan.core.spark.SparkCalculator;
import com.stan.core.vo.PageOneSkipConvertRate;
import com.stan.core.vo.Task;
import com.stan.simulate.SimulateArgs;
import com.stan.simulate.SimulateDataSource;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.sql.Row;
import scala.Serializable;
import scala.Tuple2;
import java.util.*;

public class PageClickStreamSpark implements SparkCalculator, Serializable {

    public static void main(String[] args) {
        PageClickStreamSpark pageClickStreamSpark = new PageClickStreamSpark();
        pageClickStreamSpark.run(args);
    }

    /**
     * Driver entry point.
     * <p>
     * Builds the Spark context and a {@link Task} record, runs the page
     * one-skip conversion-rate computation, and tracks the task life cycle
     * (created -&gt; started -&gt; finished) through {@link TaskMapper}.
     *
     * @param args optional; {@code args[0]} is a JSON string with the task
     *             parameters. When no argument is given, simulated parameters
     *             are generated for local testing.
     */
    @Override
    public void run(String[] args) {
        System.out.println(SparkUtils.isLocal());

        MapperFactory mapperFactory = MapperFactory.getMapperFactory();
        TaskMapper taskMapper = mapperFactory.getMapper(TaskMapper.class);

        // TODO: remote/cluster mode + tuning
        SparkConf sparkConf = new SparkConf()
                .setMaster(ConfManager.getProperty(Constants.CONF_SPARK_MASTER))
                .setAppName(Constants.CONF_USER_VISIT_ACTION_APP_NAME + "-" + DateUtils.getTodayDate());
        JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf);

        // Create and register the task record.
        Task task = new Task();
        task.setCreateTime(DateUtils.formatTime(new Date()));
        task.setTaskId(UUID.randomUUID().toString());
        System.out.println("当前任务ID："+task.getTaskId());
        task.setTaskType("页面单跳转化率计算");

        JSONObject taskParams;
        if (args.length <= 0) {
            // No CLI arguments: use simulated parameters (local testing).
            task.setTaskName(TaskUtils.genTaskName(true));
            taskParams = SimulateArgs.simulateTaskParamJSONObject();
        } else {
            task.setTaskName(TaskUtils.genTaskName(false));
            taskParams = JSONObject.parseObject(args[0]);
        }
        // Stamp the generated task id into the params before persisting;
        // the redundant earlier setTaskParams() call was removed (it was
        // always overwritten here).
        taskParams.put(Constants.PARAM_TASK_ID, task.getTaskId());
        task.setTaskParams(taskParams.toJSONString());
        task.setTaskStatus(Task.TASK_CREATED);
        taskMapper.insert(task);

        try {
            // Run the computation, recording start/finish status around it.
            task.setStartTime(DateUtils.formatTime(new Date()));
            task.setTaskStatus(Task.TASK_STARTED);
            taskMapper.update(task);

            compute(javaSparkContext, task);

            task.setFinishTime(DateUtils.formatTime(new Date()));
            task.setTaskStatus(Task.TASK_FINISHED);
            taskMapper.update(task);

            System.out.println("当前任务Task ID为:"+task.getTaskId());
        } finally {
            // BUGFIX: always release the Spark context, even when compute()
            // throws. The original leaked the context on any failure.
            javaSparkContext.close();
        }
    }

    /**
     * Core computation pipeline:
     * query action rows -&gt; group by session -&gt; build click streams -&gt;
     * count page-to-page splits -&gt; compute per-split conversion rates -&gt;
     * persist the results keyed by task id.
     *
     * @param javaSparkContext active Spark context
     * @param task             task whose params (date range, target page flow) drive the job
     */
    @Override
    public void compute(JavaSparkContext javaSparkContext, Task task) {
        JSONObject taskParams = JSONObject.parseObject(task.getTaskParams());
        SQLContext sqlContext = new SQLContext(javaSparkContext);
        SimulateDataSource.mock(javaSparkContext, sqlContext);
        JavaRDD<Row> rowRDD = SparkUtils.queryUserActionDataToRDDWithDateRange(sqlContext, taskParams);

        // Key each action row by its session id (column 0).
        JavaPairRDD<String, Row> session2rowRDD = rowRDD.mapToPair(new PairFunction<Row, String, Row>() {
            @Override
            public Tuple2<String, Row> call(Row row) throws Exception {
                String sessionId = row.getString(0);
                return new Tuple2<String, Row>(sessionId, row);
            }
        });
        // Cached because it is reused below by getStartPageCount().
        session2rowRDD.cache();

        JavaPairRDD<String, Iterable<Row>> session2rowsRDD = session2rowRDD.groupByKey();

        // Turn each session's rows into a click-stream string like "1-2-3".
        JavaPairRDD<String, String> session2clickStream = changeToClickStream(session2rowsRDD);
        // Debug output: dump every (sessionId, clickStream) pair.
        session2clickStream.foreach(new VoidFunction<Tuple2<String, String>>() {
            @Override
            public void call(Tuple2<String, String> sessionClickStream) throws Exception {
                System.out.println(sessionClickStream);
            }
        });

        // Count every page-to-page split ("x-y") across all sessions.
        Map<String, Object> splitAndCountMap = splitCalculate(session2clickStream);
        System.out.println(splitAndCountMap);

        // Number of visits to the first page of the target flow.
        long startPageCount = getStartPageCount(session2rowRDD, taskParams);
        System.out.println(startPageCount);

        // Conversion rate of each split in the target page flow.
        Map<String, Double> split2OneStepConvertRate = calculateOneStepConvertRate(splitAndCountMap, startPageCount,
                taskParams);
        System.out.println(split2OneStepConvertRate);

        // Persist the per-split rates keyed by task id.
        persistPageOneSkipConvertRate(taskParams.getString(Constants.PARAM_TASK_ID),
                split2OneStepConvertRate);
    }

    /**
     * Persists the one-skip conversion rate of every split for the given task.
     *
     * @param taskId             id of the task the rates belong to
     * @param oneSkipConvertRate map from split ("x-y") to its conversion rate
     */
    public static void persistPageOneSkipConvertRate(
            String taskId,
            Map<String, Double> oneSkipConvertRate) {
        List<PageOneSkipConvertRate> records = new ArrayList<>(oneSkipConvertRate.size());
        // Iterate entries directly instead of keySet()+get() (avoids a second
        // map lookup per split).
        for (Map.Entry<String, Double> entry : oneSkipConvertRate.entrySet()) {
            PageOneSkipConvertRate record = new PageOneSkipConvertRate();
            record.setTaskId(taskId);
            record.setSplit(entry.getKey());
            record.setConvertRate(entry.getValue());
            records.add(record);
        }

        MapperFactory factory = MapperFactory.getMapperFactory();
        PageOneSkipConvertRateMapper mapper = factory.getMapper(PageOneSkipConvertRateMapper.class);
        mapper.insertBatch(records);
    }

    /**
     * Computes the one-skip conversion rate for each adjacent page pair of the
     * target page flow: rate(i->i+1) = count("i-i+1") / count of the previous
     * step (the first step's denominator is {@code startPageCount}).
     *
     * @param splitAndCountMap split ("x-y") to occurrence count (values are Long)
     * @param startPageCount   visit count of the flow's first page
     * @param taskParams       task params holding {@link Constants#PARAM_TARGET_PAGE_FLOW}
     * @return map from split to its conversion rate
     */
    public static Map<String, Double> calculateOneStepConvertRate(Map<String, Object> splitAndCountMap,
                                                                  long startPageCount,
                                                                  JSONObject taskParams) {
        Map<String, Double> split2oneStepConvertRate = new HashMap<String, Double>();
        String targetClickStream = taskParams.getString(Constants.PARAM_TARGET_PAGE_FLOW);
        String[] targetPageIds = targetClickStream.split("-");

        long fromSplitCount = startPageCount;
        for (int i = 0; i < targetPageIds.length - 1; i++) {
            String split = targetPageIds[i] + "-" + targetPageIds[i + 1];
            Object countObj = splitAndCountMap.get(split);
            long toSplitCount = (countObj == null) ? 0L : (Long) countObj;

            // BUGFIX: guard against division by zero — when the previous step
            // had no visits the original produced Infinity/NaN.
            double oneStepConvertRate = (fromSplitCount == 0L)
                    ? 0.0
                    : (double) toSplitCount / fromSplitCount;
            fromSplitCount = toSplitCount;

            split2oneStepConvertRate.put(split, oneStepConvertRate);
        }
        return split2oneStepConvertRate;
    }

    /**
     * Counts how many action rows hit the first page of the target page flow.
     *
     * @param session2rowRDD (sessionId, action row) pairs
     * @param taskParams     task params holding {@link Constants#PARAM_TARGET_PAGE_FLOW}
     * @return number of actions whose page id equals the flow's start page
     */
    public static long getStartPageCount(JavaPairRDD<String, Row> session2rowRDD, JSONObject taskParams) {
        String targetClickStream = taskParams.getString(Constants.PARAM_TARGET_PAGE_FLOW);
        final String startPageId = targetClickStream.split("-")[0];

        // Keep only actions whose page id (column 4) is the start page.
        JavaPairRDD<String, Row> startPageRDD = session2rowRDD.filter(
                new Function<Tuple2<String, Row>, Boolean>() {
                    @Override
                    public Boolean call(Tuple2<String, Row> sessionRow) throws Exception {
                        String pageId = sessionRow._2.getString(4);
                        return StringUtils.isNotEmpty(pageId) && pageId.equals(startPageId);
                    }
                });

        return startPageRDD.count();
    }

    /**
     * Converts each session's grouped action rows into a click-stream string:
     * rows are filtered to those with a page id (column 4), sorted by action
     * time (column 3) earliest-first, and joined with "-".
     *
     * @param session2RowsRDD (sessionId, rows of that session) pairs
     * @return (sessionId, clickStream) pairs, e.g. ("s1", "1-2-3")
     */
    public static JavaPairRDD<String, String> changeToClickStream(JavaPairRDD<String, Iterable<Row>> session2RowsRDD) {
        return session2RowsRDD.mapToPair(
                new PairFunction<Tuple2<String, Iterable<Row>>, String, String>() {
                    @Override
                    public Tuple2<String, String> call(Tuple2<String, Iterable<Row>> sessionRows) throws Exception {
                        // Keep only rows that actually carry a page id.
                        List<Row> rows = new ArrayList<Row>();
                        for (Row row : sessionRows._2) {
                            if (StringUtils.isNotEmpty(row.getString(4))) {
                                rows.add(row);
                            }
                        }

                        // Sort by action time, earliest first.
                        Collections.sort(rows, new Comparator<Row>() {
                            @Override
                            public int compare(Row o1, Row o2) {
                                String actionTime1 = o1.getString(3);
                                String actionTime2 = o2.getString(3);

                                if (StringUtils.isNotEmpty(actionTime1) && actionTime1.equals(actionTime2)) {
                                    return 0;
                                } else if (DateUtils.before(actionTime1, actionTime2)) {
                                    // BUGFIX: an earlier action must sort first.
                                    // The original returned 1 here, yielding a
                                    // latest-first order and therefore a
                                    // reversed click stream.
                                    return -1;
                                } else {
                                    return 1;
                                }
                            }
                        });

                        // Build the "p1-p2-p3" click-stream string.
                        // StringBuilder: no synchronization needed inside a task.
                        StringBuilder clickStreamSB = new StringBuilder();
                        for (Row row : rows) {
                            clickStreamSB.append(row.getString(4)).append("-");
                        }
                        String clickStream = ConcatStrUtils.trimSeqarator(clickStreamSB.toString(), "-");
                        return new Tuple2<String, String>(sessionRows._1, clickStream);
                    }
                });
    }

    /**
     * Counts every page-to-page split occurring in the click streams:
     * (sessionId, "1-2-3") contributes ("1-2", 1) and ("2-3", 1).
     *
     * @param session2clickStream (sessionId, clickStream) pairs
     * @return map from split ("x-y") to its total occurrence count
     *         (values are Long, per countByKey)
     */
    public static Map<String, Object> splitCalculate(JavaPairRDD<String, String> session2clickStream) {
        JavaPairRDD<String, Integer> split2One = session2clickStream.flatMapToPair(
                new PairFlatMapFunction<Tuple2<String, String>, String, Integer>() {
                    @Override
                    public Iterable<Tuple2<String, Integer>> call(Tuple2<String, String> sessionClickStream) throws Exception {
                        String[] pageIds = sessionClickStream._2.split("-");
                        List<Tuple2<String, Integer>> splits =
                                new ArrayList<Tuple2<String, Integer>>(Math.max(pageIds.length - 1, 0));
                        for (int i = 0; i < pageIds.length - 1; i++) {
                            splits.add(new Tuple2<String, Integer>(pageIds[i] + "-" + pageIds[i + 1], 1));
                        }
                        return splits;
                    }
                });

        return split2One.countByKey();
    }

    /**
     * Returns a SQLContext in local mode, or a HiveContext when running
     * against the production cluster. (Removed an unused local that read the
     * spark-master property without using it.)
     *
     * @param javaSparkContext active Spark context
     * @return SQL context appropriate for the current run mode
     */
    public SQLContext getSQLContext(JavaSparkContext javaSparkContext) {
        if (SparkUtils.isLocal()) {
            return new SQLContext(javaSparkContext.sc());
        }
        // Production: read through Hive.
        return new HiveContext(javaSparkContext.sc());
    }

}
