package com.hopu.shop.era;

import com.alibaba.fastjson.JSONObject;

import com.hopu.bean.era.EraHotCateTop3;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.util.Properties;

public class EraHotCateTop3Ana {

    /** Default input log location, used when no CLI argument is supplied. */
    private static final String DEFAULT_INPUT = "D://word/user_session.log";

    /** JDBC target for the result table. */
    private static final String JDBC_URL =
            "jdbc:mysql://192.168.136.200:3306/data_ana?useUnicode=true&characterEncoding=UTF-8";
    private static final String JDBC_TABLE = "era_hot_cate_top3";

    /**
     * Entry point: parses user-session events from a JSON-lines log, buckets
     * them by the user's birth era ("50" … "10"), and for each era appends the
     * top-3 hottest categories to MySQL.
     *
     * @param args optional; {@code args[0]} overrides the input log path
     */
    public static void main(String[] args) {

        // Allow the input path to be passed on the command line; fall back to
        // the original hard-coded location for backward compatibility.
        String input = args.length > 0 ? args[0] : DEFAULT_INPUT;

        SparkSession spark = SparkSession.builder()
                .config("spark.driver.allowMultipleContexts", "true")
                .config("spark.sql.crossJoin.enabled", "true")
                .appName("EraHotCateTop3Ana")
                .master("local").getOrCreate();
        try {
            JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
            JavaRDD<String> rdd = context.textFile(input);
            JavaRDD<EraHotCateTop3> events = rdd.map(t -> {
                JSONObject json = (JSONObject) JSONObject.parse(t);
                // Third digit of the birth year names the decade:
                // "1985" -> "8" + "0" = "80"; "2005" -> "0" + "0" = "00".
                String era = json.getJSONObject("birthday").getString("year").substring(2, 3) + "0";
                String cid = json.getString("category_id");
                String cname = json.getString("category_name");
                String type = json.getString("event_type");
                return new EraHotCateTop3(era, cid, cname, type);
            });
            // The RDD is filtered once per era below; cache it so the log is
            // read and parsed a single time instead of once per era.
            events.cache();

            String[] eras = {"50", "60", "70", "80", "90", "00", "10"};
            for (String e : eras) {
                JavaRDD<EraHotCateTop3> filtered = events.filter(t -> e.equals(t.getEra()));
                insert(filtered, spark);
            }
        } finally {
            // Release the local Spark context even if a stage fails,
            // instead of leaking it on the error path.
            spark.stop();
        }
    }

    /**
     * Counts view/cart/parchase events per category for one era, joins the
     * three counts on (cid, era), orders by parchase desc, cart desc, view
     * desc, and appends the top 3 rows to the MySQL table
     * {@code era_hot_cate_top3}.
     *
     * NOTE(review): the event-type literal {@code "parchase"} (and the derived
     * column names) is kept as-is because it must match the strings emitted in
     * the raw log — confirm whether the upstream producer really writes
     * "parchase" rather than "purchase" before renaming anything.
     *
     * @param map   the events of a single era
     * @param spark active session used for DataFrame creation, SQL and JDBC
     */
    public static void insert(JavaRDD<EraHotCateTop3> map, SparkSession spark) {
        // Split the era's events by event type.
        JavaRDD<EraHotCateTop3> view = map.filter(t -> "view".equals(t.getType()));
        JavaRDD<EraHotCateTop3> cart = map.filter(t -> "cart".equals(t.getType()));
        JavaRDD<EraHotCateTop3> parchase = map.filter(t -> "parchase".equals(t.getType()));
        Dataset<Row> viewDF = spark.createDataFrame(view, EraHotCateTop3.class);
        Dataset<Row> cartDF = spark.createDataFrame(cart, EraHotCateTop3.class);
        Dataset<Row> parchaseDF = spark.createDataFrame(parchase, EraHotCateTop3.class);

        // Per-category event counts; groupBy().count() yields a "count" column.
        viewDF = viewDF.groupBy("cid", "cname", "era").count();
        cartDF = cartDF.groupBy("cid", "cname", "era").count();
        parchaseDF = parchaseDF.groupBy("cid", "cname", "era").count();

        viewDF.createOrReplaceTempView("viewDF");
        cartDF.createOrReplaceTempView("cartDF");
        parchaseDF.createOrReplaceTempView("parchaseDF");
        // Inner join keeps only categories that have all three event types.
        Dataset<Row> joinDF = spark.sql("select viewDF.era, viewDF.cid, viewDF.cname, viewDF.count as viewCount, cartDF.count as cartCount, parchaseDF.count as parchaseCount " +
                "from viewDF join cartDF on viewDF.cid = cartDF.cid and viewDF.era = cartDF.era join parchaseDF on parchaseDF.cid = viewDF.cid and parchaseDF.era = viewDF.era");

        // Hottest first: purchases outrank cart adds, which outrank views.
        Dataset<Row> order = joinDF.orderBy(joinDF.col("parchaseCount").desc(),
                joinDF.col("cartCount").desc(), joinDF.col("viewCount").desc());
        order.show();

        // Keep only the top 3 categories of this era.
        Dataset<Row> limit = order.limit(3);

        // Append the result to MySQL.
        // NOTE(review): credentials are hard-coded; consider externalizing
        // them (system properties / config file) before production use.
        Properties pro = new Properties();
        pro.setProperty("driver", "com.mysql.jdbc.Driver");
        pro.setProperty("user", "root");
        pro.setProperty("password", "123456");
        limit.write().mode(SaveMode.Append).jdbc(JDBC_URL, JDBC_TABLE, pro);

    }
}
