package com.hopu.shop;

import com.alibaba.fastjson.JSONObject;
import com.hopu.bean.shop.ProCateHotTop3;
import com.hopu.bean.shop.ProShopHotTop10;
import com.hopu.util.HBaseUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;

import java.io.IOException;
import java.util.List;
import java.util.Properties;

/**
 * Batch job: reads user-session log lines (one JSON object per line), counts
 * events per (province, shop, event type), and writes each province's top-10
 * hottest shops — ranked by view, then cart, then purchase counts — to MySQL.
 */
public class ProShopTop10Ana {
    public static void main(String[] args) {

        SparkSession spark = SparkSession.builder()
                .config("spark.driver.allowMultipleContexts", "true")
                .config("spark.sql.crossJoin.enabled", "true")
                .appName("proCateHotTop3Ana")
                .master("local").getOrCreate();
        JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
        JavaRDD<String> rdd = context.textFile("D://word/user_session.log");

        // Parse each JSON log line into the bean from which the DataFrame
        // schema is derived. Missing keys yield null fields (fastjson getString).
        JavaRDD<ProShopHotTop10> map = rdd.map(t -> {
            JSONObject json = (JSONObject) JSONObject.parse(t);
            String province = json.getString("province");
            String sid = json.getString("shop_id");
            String sname = json.getString("shop_name");
            String type = json.getString("event_type");
            return new ProShopHotTop10(province, sid, sname, type);
        });

        // Bean RDD -> DataFrame.
        Dataset<Row> df = spark.createDataFrame(map, ProShopHotTop10.class);

        // Event count per (province, shop id, shop name, event type).
        Dataset<Row> count = df.groupBy("province", "sid", "sname", "type").count();

        // FIX: this dataset is filtered once per province below (34 times).
        // Without caching, each filter re-reads the log file and re-runs the
        // whole aggregation; cache it once so the loop reuses the result.
        count.cache();

//        count.show();

        // The 34 provincial-level regions of China.
        String[] proArr = {"北京市","上海市","天津市","重庆市","黑龙江省","辽宁省","吉林省","河北省",
                "河南省","湖北省","湖南省","山东省","山西省","陕西省",
                "安徽省","浙江省","江苏省","福建省","广东省","海南省","四川省",
                "云南省","贵州省","青海省","甘肃省","江西省","台湾省","内蒙古自治区","宁夏回族自治区",
                "新疆维吾尔自治区","西藏自治区","广西壮族自治区","香港特别行政区","澳门特别行政区"};
        for (String pro : proArr) {
            // FIX: column-based equality instead of string-built SQL
            // ("province = '"+pro+"'") — same result for these literals, but
            // immune to quoting/injection issues if a value ever contains "'".
            Dataset<Row> proDF = count.filter(count.col("province").equalTo(pro));
            top10hotshop(proDF, spark);
        }

        // FIX: release cached data and the local Spark context (was leaked).
        count.unpersist();
        spark.stop();
    }

    /**
     * Computes the top-10 shops of one province and appends them to MySQL.
     *
     * @param proDF rows of one province with columns
     *              (province, sid, sname, type, count)
     * @param spark session used to run the three-way join SQL
     */
    public static void top10hotshop(Dataset<Row> proDF, SparkSession spark) {
        // Split the province's rows by event type. NOTE(review): 'parchase'
        // (sic) is assumed to be the literal event_type value in the log data —
        // confirm before "fixing" the spelling, or the filter matches nothing.
        Dataset<Row> viewDF = proDF.filter("type = 'view'");
        Dataset<Row> cartDF = proDF.filter("type = 'cart'");
        Dataset<Row> parchaseDF = proDF.filter("type = 'parchase'");

        // Temp views are re-registered (replaced) on every call; inner joins
        // keep only shops that have all three event types.
        viewDF.createOrReplaceTempView("viewDF");
        cartDF.createOrReplaceTempView("cartDF");
        parchaseDF.createOrReplaceTempView("parchaseDF");
        Dataset<Row> joinDF = spark.sql("select viewDF.province, viewDF.sid, viewDF.sname, " +
                "viewDF.count as viewCount, cartDF.count as cartCount, parchaseDF.count as parchaseCount " +
                "from viewDF join cartDF on viewDF.sid = cartDF.sid join parchaseDF on parchaseDF.sid = viewDF.sid");

        // Rank: views first, cart adds as tiebreak, purchases as final tiebreak.
        Dataset<Row> orderDF = joinDF.orderBy(joinDF.col("viewCount").desc(),
                joinDF.col("cartCount").desc(),
                joinDF.col("parchaseCount").desc());

        Dataset<Row> limit10 = orderDF.limit(10);
        limit10.show();

        // Append the province's top 10 to MySQL.
        // NOTE(review): credentials are hardcoded — move to configuration
        // (environment/properties file) before this leaves a dev environment.
        Properties pro = new Properties();
        pro.setProperty("driver", "com.mysql.jdbc.Driver");
        pro.setProperty("user", "root");
        pro.setProperty("password", "123456");
        limit10.write().mode(SaveMode.Append).jdbc("jdbc:mysql://192.168.136.200:3306/data_ana?useUnicode=true&characterEncoding=UTF-8", "pro_hot_shop_top10", pro);

        // TODO(review): HBase export (HBaseUtils.putOneRowToHbase into table
        // "pro", column family "top10shop") was present as dead commented-out
        // code; re-implement from version control if that sink is still needed.
    }
}
