package com.hopu.shop.total;

import com.alibaba.fastjson.JSONObject;
import com.hopu.bean.total.TotalCount;
import com.util.JDBCUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * Spark batch job: reads a JSON-lines user-session log, computes total PV,
 * UV (distinct uid per day), and per-event-type counts, then writes the
 * aggregates into the {@code total_count} MySQL table via {@link JDBCUtils}.
 */
public class TotalCountAna {

    public static void main(String[] args) {
        // Input path is configurable via the first CLI argument; defaults to
        // the original hard-coded location for backward compatibility.
        String inputPath = args.length > 0 ? args[0] : "D://word/user_session.log";

        SparkSession spark = SparkSession.builder()
                .config("spark.driver.allowMultipleContexts", "true")
                .config("spark.sql.crossJoin.enabled", "true")
                .appName("totalCountAna")
                .master("local")
                .getOrCreate();
        try {
            JavaSparkContext context = new JavaSparkContext(spark.sparkContext());

            // Each log line is one JSON object; map it to a TotalCount bean.
            JavaRDD<TotalCount> beans = context.textFile(inputPath).map(TotalCountAna::parseLine);

            Dataset<Row> df = spark.createDataFrame(beans, TotalCount.class);
            df.show(); // debug preview of the parsed events

            // Total PV: one row per logged event.
            // NOTE(review): this value is computed but never persisted below —
            // confirm whether the total_count table should also receive it.
            long totalPv = df.count();

            // UV: number of distinct (uid, day) pairs.
            long uvCount = df.select("uid", "day").distinct().count();

            // Per-event-type counts. The literal 'parchase' is kept verbatim
            // because it must match the event_type strings in the source log —
            // presumably a typo for "purchase" originating in the data
            // producer; verify against the log before renaming it.
            long viewCount = countByType(df, "view");
            long cartCount = countByType(df, "cart");
            long purchaseCount = countByType(df, "parchase");
            long totalCount = viewCount + cartCount + purchaseCount;

            // All interpolated values are locally computed longs, so this
            // concatenation cannot inject SQL; a PreparedStatement would
            // still be preferable if JDBCUtils ever supports parameters.
            JDBCUtils.executeUpdate("insert into total_count values(null, " + viewCount + ", " + cartCount + ", " +
                    purchaseCount + "," + totalCount + "," + uvCount + ")");
        } finally {
            // Release Spark resources even if the job fails partway through.
            spark.stop();
        }
    }

    /**
     * Parses one JSON log line into a {@link TotalCount} bean.
     * Expected fields: {@code uid}, {@code event_type}, and a nested
     * {@code odate} object carrying year / monthValue / dayOfMonth.
     */
    private static TotalCount parseLine(String line) {
        JSONObject json = (JSONObject) JSONObject.parse(line);
        JSONObject odate = json.getJSONObject("odate");
        String day = odate.getString("year") + "-"
                + odate.getString("monthValue") + "-"
                + odate.getString("dayOfMonth");
        return new TotalCount(json.getString("uid"), day, json.getString("event_type"));
    }

    /** Counts the rows whose {@code type} column equals the given event type. */
    private static long countByType(Dataset<Row> df, String type) {
        return df.filter("type = '" + type + "'").count();
    }

}
