package spark.product;

import com.alibaba.fastjson.JSONObject;
import conf.ConfigurationManager;
import constant.Constants;
import dao.IProvinceTopProductDAO;
import dao.ITaskDAO;
import dao.factory.DAOFactory;
import domain.ProvinceTopProduct;
import domain.Task;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;
import test.MockData;
import util.DateUtils;
import util.ParamUtils;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Area (province) top-product analysis job.
 *
 * <p>Flow: load click actions for the task's date range from Hive (mocked locally)
 * and city/province info from MySQL via JDBC, join them on city id into the temp
 * view {@code click_product_basic}, aggregate click counts per (province, product)
 * with a custom {@code group_concat_distinct} UDAF, then collect the (small)
 * result to the driver and batch-insert it into MySQL.
 */
public class AreaTop3ProductSpark {

    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
                .setAppName("AreaTop3ProductSpark")
                .setMaster("local[2]"); // NOTE(review): hard-coded local master; externalize for cluster runs
        SparkSession sparkSession = SparkSession.builder()
                .enableHiveSupport()
                .config(conf)
                .getOrCreate();
        try {
            // Generate mock source data for local testing.
            MockData.mockData(sparkSession);

            ITaskDAO taskDAO = DAOFactory.getTaskDAO();
            // NOTE(review): task id 2 is hard-coded; presumably it should come from
            // args via ParamUtils (imported but unused) — confirm intended behavior.
            Task task = taskDAO.findById(2);
            JSONObject taskParam = JSONObject.parseObject(task.getTaskParam());

            // Register the custom UDAF referenced by the aggregation SQL below.
            sparkSession.udf().register("group_concat_distinct", new GroupConcatDistinctUDAF());

            JavaPairRDD<String, Row> cityId2clickActionRDD = getActionRDDByDateRange(sparkSession, taskParam);
            JavaPairRDD<String, Row> cityId2cityInfoRDD = getCityId2cityInfoRDD(sparkSession);
            generateTempAreaProductBasicTable(sparkSession, cityId2clickActionRDD, cityId2cityInfoRDD);
            JavaRDD<Row> provinceTopProductRDD = generateTempAreaProductClickCountTable(sparkSession);

            // Unlike larger result sets, this aggregation is small: collect it to the
            // driver and insert everything into MySQL in one batch through the local
            // connection pool.
            List<Row> rowList = provinceTopProductRDD.collect();
            persistProvinceTopProduct(rowList);
        } finally {
            // Always release the session; the original leaked it on every code path.
            sparkSession.close();
        }
    }

    /**
     * Converts collected result rows — shaped (province, product_id, click_count,
     * city_infos) — into {@link ProvinceTopProduct} objects and batch-inserts them.
     *
     * @param rowList rows collected from {@link #generateTempAreaProductClickCountTable}
     */
    private static void persistProvinceTopProduct(List<Row> rowList) {
        List<ProvinceTopProduct> provinceTopProductList = new ArrayList<>(rowList.size());
        for (Row row : rowList) {
            ProvinceTopProduct provinceTopProduct = new ProvinceTopProduct();
            // NOTE(review): a minute-formatted timestamp is stored as the "task id",
            // not the id of the Task being executed — confirm this is intentional.
            provinceTopProduct.setTaskId(DateUtils.formatTimeMinute(new Date()));
            provinceTopProduct.setProvince(row.getString(0));
            provinceTopProduct.setProductid(row.getString(1));
            // count(*) is a SQL bigint (Java long); narrowed to int for the domain object.
            provinceTopProduct.setClickCount((int) row.getLong(2));
            provinceTopProduct.setCityInfos(row.getString(3));
            provinceTopProductList.add(provinceTopProduct);
        }
        IProvinceTopProductDAO provinceTopProductDAO = DAOFactory.getProvinceTopProductDAO();
        provinceTopProductDAO.insertBatch(provinceTopProductList);
    }

    /**
     * Aggregates the temp view {@code click_product_basic} into per-(province,
     * product) click counts plus a distinct concatenation of contributing city
     * ids, ordered by click count descending.
     *
     * @return rows shaped (province, product_id, click_count, city_infos)
     */
    private static JavaRDD<Row> generateTempAreaProductClickCountTable(SparkSession sparkSession) {
        String sql = "select "
                + "province,"
                + "product_id,"
                + "count(*) click_count, "
                + "group_concat_distinct(city_id) city_infos "
                + "from click_product_basic "
                + "group by province,product_id "
                + "order by click_count desc";
        Dataset<Row> df = sparkSession.sql(sql);
        return df.toJavaRDD();
    }

    /**
     * Joins click actions with city info on city id, projects each joined pair to
     * (city_id, province, product_id), and registers the result as the temp view
     * {@code click_product_basic} consumed by the aggregation step.
     */
    private static void generateTempAreaProductBasicTable(
            SparkSession sparkSession,
            JavaPairRDD<String, Row> cityId2clickActionRDD,
            JavaPairRDD<String, Row> cityId2cityInfoRDD) {
        JavaPairRDD<String, Tuple2<Row, Row>> joinRDD = cityId2clickActionRDD.join(cityId2cityInfoRDD);
        JavaRDD<Row> mappedRDD = joinRDD.map(tuple -> {
            String cityId = tuple._1;
            Row clickAction = tuple._2._1;
            Row cityInfo = tuple._2._2;
            String productId = clickAction.getString(5); // column 5 of user_visit_action — TODO confirm schema
            String province = cityInfo.getString(1);     // column 1 of city_province — TODO confirm schema
            return RowFactory.create(cityId, province, productId);
        });
        List<StructField> structFields = new ArrayList<>();
        structFields.add(DataTypes.createStructField("city_id", DataTypes.StringType, true));
        structFields.add(DataTypes.createStructField("province", DataTypes.StringType, true));
        structFields.add(DataTypes.createStructField("product_id", DataTypes.StringType, true));
        StructType schema = DataTypes.createStructType(structFields);
        Dataset<Row> df = sparkSession.createDataFrame(mappedRDD, schema);
        // createOrReplaceTempView replaces the deprecated registerTempTable (Spark 2.0+).
        df.createOrReplaceTempView("click_product_basic");
    }

    /**
     * Loads the {@code city_province} table from MySQL through the Spark JDBC data
     * source and keys each row by its city id (column 0).
     *
     * @return pair RDD of (city_id, full city row)
     */
    private static JavaPairRDD<String, Row> getCityId2cityInfoRDD(SparkSession sparkSession) {
        String url = ConfigurationManager.getProperty(Constants.JDBC_URL);
        String user = ConfigurationManager.getProperty(Constants.JDBC_USER);
        String password = ConfigurationManager.getProperty(Constants.JDBC_PASSWORD);
        Map<String, String> options = new HashMap<>();
        options.put("url", url);
        options.put("dbtable", "city_province");
        options.put("user", user);
        options.put("password", password);

        Dataset<Row> cityInfoDS = sparkSession.read().format("jdbc").options(options).load();
        return cityInfoDS.toJavaRDD().mapToPair(row -> {
            String cityId = row.getString(0);
            return new Tuple2<>(cityId, row);
        });
    }

    /**
     * Selects {@code user_visit_action} rows within the task's date range and keys
     * them by city id (column 6).
     *
     * @param taskParam JSON task parameters carrying start/end dates
     * @return pair RDD of (city_id, full action row)
     */
    public static JavaPairRDD<String, Row> getActionRDDByDateRange(SparkSession sparkSession, JSONObject taskParam) {
        String startDate = taskParam.getString(Constants.PARAM_START_DATE);
        String endDate = taskParam.getString(Constants.PARAM_END_DATE);

        // NOTE(review): dates are string-concatenated into the SQL. They originate
        // from stored task config rather than end users, but validating the date
        // format before use would guard against malformed/injected queries.
        String sql = "select * "
                + "from user_visit_action "
                + "where date>='" + startDate + "' "
                + "and date<='" + endDate + "'";
        Dataset<Row> actionDS = sparkSession.sql(sql);
        return actionDS.javaRDD().mapToPair(row -> {
            String cityId = row.getString(6);
            return new Tuple2<>(cityId, row);
        });
    }
}
