package com.bg.sparkproject.spark;

import com.alibaba.fastjson.JSONObject;
import com.bg.sparkproject.config.ConfigurationManager;
import com.bg.sparkproject.constant.Constants;
import com.bg.sparkproject.dao.TaskDao;
import com.bg.sparkproject.dao.factory.DaoFactory;
import com.bg.sparkproject.domain.Task;
import com.bg.sparkproject.test.MockData;
import com.bg.sparkproject.utils.ParamUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
import scala.Tuple2;

/**
 * @Author: zhengshunzhi
 * @Date: 2018/12/19 15:43
 * @Description: User visit session analysis
 */
public class UserVisitSessionAnalyzeSpark {

    /** Whether the job runs in local mode (true) or on the cluster (false); read from config. */
    static final Boolean local = ConfigurationManager.getBoolean(Constants.SPARK_LOCAL);

    public static void main(String[] args) {
        // Build the Spark context. Only force a local master when running in local mode;
        // on the cluster the master must come from spark-submit, so hard-coding
        // "local[2]" unconditionally would break production runs.
        SparkConf conf = new SparkConf().setAppName(Constants.SPARK_APP_NAME_SESSION);
        if (local) {
            conf.setMaster("local[2]");
        }
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            SQLContext sqlContext = getSqlContext(sc);
            // Generate mock data for local testing (no-op on the cluster).
            mockData(sc, sqlContext);
            // Resolve the task to run from the command-line arguments.
            Long taskId = ParamUtils.getTaskIdFromArgs(args, "taskId");
            // Look up the task and its JSON parameters through the DAO layer.
            TaskDao taskDao = DaoFactory.getTaskDao();
            Task task = taskDao.findById(taskId);
            JSONObject jsonObject = JSONObject.parseObject(task.getTaskParam());
            // Fetch the user action rows that fall inside the task's date range.
            JavaRDD<Row> actionByDateRange = getActionByDateRange(sqlContext, jsonObject);
        } finally {
            // Always release the Spark context, even if the job fails part-way.
            sc.close();
        }
    }

    /**
     * Returns the SQL context appropriate for the runtime environment.
     *
     * @param sc the active Spark context
     * @return a plain {@link SQLContext} in local mode, a {@link HiveContext} in production
     */
    private static SQLContext getSqlContext(JavaSparkContext sc) {
        if (local) {
            return new SQLContext(sc);
        }
        return new HiveContext(sc);
    }

    /**
     * Generates mock data for testing; only does anything in local mode.
     *
     * @param sc         the active Spark context
     * @param sqlContext the SQL context to register the mock tables against
     */
    private static void mockData(JavaSparkContext sc, SQLContext sqlContext) {
        if (local) {
            MockData.mock(sc, sqlContext);
        }
    }

    /**
     * Fetches the user action rows whose date falls in the task's [startDate, endDate) range.
     *
     * @param sqlContext the SQL context to query through
     * @param taskParam  task parameters containing the start and end dates
     * @return the matching rows of user_visit_action as a JavaRDD
     */
    private static JavaRDD<Row> getActionByDateRange(SQLContext sqlContext, JSONObject taskParam) {
        String startDate = ParamUtils.getParam(taskParam, Constants.PARAM_START_DATE);
        String endDate = ParamUtils.getParam(taskParam, Constants.PARAM_END_DATE);

        // The date values must be quoted string literals: without quotes,
        // "date >= 2018-12-19" is parsed as integer subtraction, silently
        // returning the wrong rows.
        String sql = "select * from user_visit_action"
                + " where date >= '" + startDate + "'"
                + " and date < '" + endDate + "'";
        Dataset<Row> ds = sqlContext.sql(sql);
        return ds.javaRDD();
    }

    /**
     * Keys each raw action row by its session id and groups the rows per session,
     * as the first step of session-level aggregation.
     *
     * @param actionRDD the raw user action rows
     * @return the per-session aggregated pairs (NOTE: aggregation not yet implemented; returns null)
     */
    private static JavaPairRDD<String, String> aggregateBySession(JavaRDD<Row> actionRDD) {
        // Key each row by its session id. Assumes the session id is at column
        // index 2 — TODO confirm against the user_visit_action schema.
        JavaPairRDD<String, Row> sessionIdToActionRDD = actionRDD.mapToPair(
                new PairFunction<Row, String, Row>() {

                    @Override
                    public Tuple2<String, Row> call(Row row) throws Exception {
                        return new Tuple2<String, Row>(row.getString(2), row);
                    }
                });

        // Group all actions belonging to the same session.
        JavaPairRDD<String, Iterable<Row>> sessionIdToActionsRDD = sessionIdToActionRDD.groupByKey();

        // TODO: fold each session's search keywords and clicked categories into a
        // single <sessionId, aggregatedInfo> string pair. Unimplemented for now.
        return null;
    }
}

