package com.hopu.shop;

import com.alibaba.fastjson.JSONObject;
import com.hopu.bean.shop.ProCateHotTop3;
import com.hopu.util.HBaseUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

/**
 * Batch job: for every Chinese province, find the top-3 hottest product
 * categories, ranked by purchase count, then cart count, then view count,
 * and append the result rows to the MySQL table {@code pro_hot_cate_top3}.
 *
 * <p>Input is a line-delimited JSON session log; each line carries at least
 * {@code province}, {@code category_id}, {@code category_name} and
 * {@code event_type} fields.
 */
public class ProCateHotTop3Ana {

    /** All provinces/regions to analyse; each one is processed independently. */
    private static final String[] PROVINCES = {"北京市","上海市","天津市","重庆市","黑龙江省","辽宁省","吉林省","河北省",
            "河南省","湖北省","湖南省","山东省","山西省","陕西省",
            "安徽省","浙江省","江苏省","福建省","广东省","海南省","四川省",
            "云南省","贵州省","青海省","甘肃省","江西省","台湾省","内蒙古自治区","宁夏回族自治区",
            "新疆维吾尔自治区","西藏自治区","广西壮族自治区","香港特别行政区","澳门特别行政区"};

    public static void main(String[] args) {

        SparkSession spark = SparkSession.builder()
                .config("spark.driver.allowMultipleContexts", "true")
                .config("spark.sql.crossJoin.enabled", "true")
                .appName("proCateHotTop3Ana")
                .master("local")
                .getOrCreate();
        try {
            JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
            JavaRDD<String> rdd = context.textFile("D://word/user_session.log");

            // Parse each JSON log line into a bean carrying only the fields we group on.
            JavaRDD<ProCateHotTop3> beans = rdd.map(line -> {
                JSONObject json = JSONObject.parseObject(line); // parseObject avoids the explicit cast
                String province = json.getString("province");
                String cid = json.getString("category_id");
                String cname = json.getString("category_name");
                String type = json.getString("event_type");
                return new ProCateHotTop3(province, cid, cname, type);
            });
            Dataset<Row> df = spark.createDataFrame(beans, ProCateHotTop3.class);

            // Count events per (province, category id, category name, event type).
            Dataset<Row> groupDF = df.groupBy("province", "cid", "cname", "type").count();

            for (String province : PROVINCES) {
                // Column-expression filter instead of string-concatenated SQL:
                // no quoting/escaping pitfalls if a value ever contains a quote.
                Dataset<Row> provinceDF = groupDF.filter(functions.col("province").equalTo(province));
                countCate(provinceDF, spark);
            }
        } finally {
            // Was missing in the original: release the local SparkContext and its resources.
            spark.stop();
        }
    }

    /**
     * Computes and persists the top-3 categories for a single province.
     *
     * <p>The incoming frame holds that province's grouped counts (one row per
     * category per event type). It is split by event type, the three slices are
     * joined back per category id, ranked by parchase &gt; cart &gt; view count,
     * and the top 3 rows are appended to MySQL.
     *
     * <p>NOTE: "parchase" (sic) matches the {@code event_type} spelling in the
     * source logs and the MySQL column names — do not "fix" the spelling here
     * without migrating the data and schema.
     *
     * @param filterDF grouped counts for exactly one province
     * @param spark    active session (kept for interface compatibility; unused here)
     */
    public static void countCate(Dataset<Row> filterDF, SparkSession spark) {
        // One slice per event type, each with its count renamed so the joined
        // frame has distinct viewCount / cartCount / parchaseCount columns.
        Dataset<Row> viewDF = filterDF.filter("type='view'");
        viewDF = viewDF.select(viewDF.col("province"),
                viewDF.col("type"),
                viewDF.col("cid"), viewDF.col("cname"),
                viewDF.col("count").as("viewCount"));
        Dataset<Row> cartDF = filterDF.filter("type='cart'");
        cartDF = cartDF.select(cartDF.col("province"),
                cartDF.col("type"),
                cartDF.col("cid"), cartDF.col("cname"),
                cartDF.col("count").as("cartCount"));
        Dataset<Row> parchaseDF = filterDF.filter("type='parchase'");

        // Inner equi-join on category id reunites the three per-type counts.
        Dataset<Row> joinDF = viewDF.join(cartDF, "cid").join(parchaseDF, "cid");

        Dataset<Row> df = joinDF.select(viewDF.col("cid"), viewDF.col("province"),
                viewDF.col("cname"), viewDF.col("viewCount"),
                cartDF.col("cartCount"), parchaseDF.col("count").as("parchaseCount"));

        // Rank: purchases first, then cart adds, then views; keep the top 3.
        Dataset<Row> top3 = df.orderBy(df.col("parchaseCount").desc(),
                df.col("cartCount").desc(),
                df.col("viewCount").desc()).limit(3);
        top3.show();

        // Append this province's top-3 rows to MySQL.
        // NOTE(review): credentials are hardcoded — move to config/env before production.
        // NOTE(review): "com.mysql.jdbc.Driver" is the legacy driver class; newer
        // Connector/J uses "com.mysql.cj.jdbc.Driver" — confirm the deployed version.
        Properties pro = new Properties();
        pro.setProperty("driver", "com.mysql.jdbc.Driver");
        pro.setProperty("user", "root");
        pro.setProperty("password", "123456");
        top3.write().mode(SaveMode.Append).jdbc("jdbc:mysql://192.168.136.200:3306/data_ana?useUnicode=true&characterEncoding=UTF-8", "pro_hot_cate_top3", pro);

        // HBase schema used by the (removed) HBase sink: create 'province', 'top3hotcate'
    }

}
