package com.hopu.shop.pro;

import com.alibaba.fastjson.JSONObject;
import com.hopu.bean.pro.ProTotalCount;
import com.hopu.util.HBaseUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.io.IOException;
import java.util.List;
import java.util.Properties;

public class ProTotalCountAna {

    /**
     * Per-province event-count analysis.
     *
     * <p>Reads a user-session log (one JSON object per line with at least
     * {@code province} and {@code event_type} fields), counts the
     * {@code view}, {@code cart} and {@code parchase} events per province,
     * prints the joined result and appends it to the MySQL table
     * {@code pro_total_count}.
     *
     * @param args optional; {@code args[0]} overrides the input log path
     *             (defaults to {@code D://word/user_session.log})
     */
    public static void main(String[] args) {
        // Generalized: the input path was hard-coded; allow an override while
        // keeping the old default for backward compatibility.
        String inputPath = args.length > 0 ? args[0] : "D://word/user_session.log";

        SparkSession spark = SparkSession.builder()
                .config("spark.driver.allowMultipleContexts", "true")
                .config("spark.sql.crossJoin.enabled", "true")
                .appName("proTotalCountAna")
                .master("local").getOrCreate();
        try {
            JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
            JavaRDD<String> rdd = context.textFile(inputPath);

            // One ProTotalCount(province, type) bean per log line.
            JavaRDD<ProTotalCount> map = rdd.map(t -> {
                // parseObject returns JSONObject directly — avoids the unchecked cast.
                JSONObject json = JSONObject.parseObject(t);
                String province = json.getString("province");
                String type = json.getString("event_type");
                return new ProTotalCount(province, type);
            });
            Dataset<Row> df = spark.createDataFrame(map, ProTotalCount.class);
            // The frame is filtered three times below; cache it so the source
            // file is only read and parsed once.
            df.cache();

            // 统计各区域总操作量 — per-province counts for each event type.
            // NOTE(review): 'parchase' looks like a typo for 'purchase', but it
            // must match the event_type values actually present in the log and
            // the existing MySQL column names, so it is left unchanged here —
            // confirm against the data before renaming.
            Dataset<Row> viewDF = df.filter("type = 'view'").groupBy("province").count();
            Dataset<Row> cartDF = df.filter("type = 'cart'").groupBy("province").count();
            Dataset<Row> parchaseDF = df.filter("type = 'parchase'").groupBy("province").count();

            viewDF.createOrReplaceTempView("viewDF");
            cartDF.createOrReplaceTempView("cartDF");
            parchaseDF.createOrReplaceTempView("parchaseDF");
            // Inner joins: a province missing any one of the three event types
            // is dropped from the result (original semantics preserved).
            Dataset<Row> joinDF = spark.sql("select viewDF.province, " +
                    "viewDF.count as viewCount, cartDF.count as cartCount, parchaseDF.count as parchaseCount " +
                    "from viewDF join cartDF on viewDF.province = cartDF.province join parchaseDF on parchaseDF.province = viewDF.province");

            joinDF.createOrReplaceTempView("joinDF");
            Dataset<Row> sql = spark.sql("select province, viewCount, cartCount, parchaseCount, (viewCount + cartCount + parchaseCount) as totalCount from joinDF");
            sql.show();

            // 保存到 MySQL 数据库 — persist the final result.
            // NOTE(review): credentials are hard-coded; move them to external
            // configuration before this leaves a dev environment.
            Properties properties = new Properties();
            properties.setProperty("driver", "com.mysql.jdbc.Driver");
            properties.setProperty("user", "root");
            properties.setProperty("password", "123456");
            sql.write().mode(SaveMode.Append).jdbc("jdbc:mysql://192.168.136.200:3306/data_ana?useUnicode=true&characterEncoding=UTF-8", "pro_total_count", properties);
        } finally {
            // Bug fix: the original never stopped the SparkSession, leaking the
            // local Spark context (and its UI/threads) on every run.
            spark.stop();
        }
    }
}
