package com.hopu.shop.sex;

import com.alibaba.fastjson.JSONObject;
import com.hopu.bean.sex.SexCount;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.util.Properties;

public class SexCountAna {

    /**
     * Spark batch job: reads a user-session log (one JSON object per line),
     * aggregates per-sex event counts (view / cart / "parchase") and distinct
     * user counts, then appends the joined result to the MySQL table
     * {@code data_ana.sex_count}.
     *
     * <p>NOTE(review): the input path, master URL, and DB credentials are
     * hard-coded — fine for a local demo, but they should come from
     * {@code args} or external config before this runs anywhere else.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        SparkSession spark = SparkSession.builder()
                .config("spark.driver.allowMultipleContexts", "true")
                .config("spark.sql.crossJoin.enabled", "true")
                .appName("SexCountAna")
                .master("local").getOrCreate();
        try {
            JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
            JavaRDD<String> rdd = context.textFile("D://word/user_session.log");

            // Parse each JSON log line into a SexCount bean (sex, event type, uid).
            // parseObject returns JSONObject directly — no cast needed.
            JavaRDD<SexCount> map = rdd.map(t -> {
                JSONObject json = JSONObject.parseObject(t);
                String sex = json.getString("sex");
                String type = json.getString("event_type");
                String uid = json.getString("uid");
                return new SexCount(sex, type, uid);
            });

            // Distinct (sex, uid) pairs -> number of unique users per sex.
            Dataset<Row> df = spark.createDataFrame(map, SexCount.class);
            df = df.select("sex", "uid");
            Dataset<Row> user = df.distinct().groupBy("sex").count();
            user.show();
            user.createOrReplaceTempView("user");

            // Split the event stream by type. NOTE(review): "parchase" is
            // presumably a typo for "purchase", but it must match what the
            // log data actually emits — TODO confirm against the source data
            // before renaming (the MySQL column aliases below depend on it too).
            JavaRDD<SexCount> view = map.filter(t -> "view".equals(t.getType()));
            JavaRDD<SexCount> cart = map.filter(t -> "cart".equals(t.getType()));
            JavaRDD<SexCount> parchase = map.filter(t -> "parchase".equals(t.getType()));

            // Per-sex event counts for each event type.
            Dataset<Row> viewDF = spark.createDataFrame(view, SexCount.class);
            Dataset<Row> cartDF = spark.createDataFrame(cart, SexCount.class);
            Dataset<Row> parchaseDF = spark.createDataFrame(parchase, SexCount.class);
            viewDF = viewDF.groupBy("sex").count();
            cartDF = cartDF.groupBy("sex").count();
            parchaseDF = parchaseDF.groupBy("sex").count();

            // Join the three per-type counts on sex. Inner joins: a sex with
            // zero events of any one type drops out of the result entirely.
            viewDF.createOrReplaceTempView("viewDF");
            cartDF.createOrReplaceTempView("cartDF");
            parchaseDF.createOrReplaceTempView("parchaseDF");
            Dataset<Row> joinDF = spark.sql("select viewDF.sex, viewDF.count as viewCount, cartDF.count as cartCount, parchaseDF.count as parchaseCount " +
                    "from viewDF join cartDF on viewDF.sex = cartDF.sex join parchaseDF on parchaseDF.sex = viewDF.sex");

            // Attach total event count and the distinct user count per sex.
            joinDF.createOrReplaceTempView("joinDF");
            Dataset<Row> result = spark.sql("select joinDF.sex, viewCount, cartCount, parchaseCount, (viewCount + cartCount + parchaseCount) as totalCount, user.count as userCount from joinDF join user on joinDF.sex = user.sex");
            result.show();

            // Append results to MySQL. NOTE(review): credentials are in
            // plaintext here; move them to config/secret storage. The legacy
            // com.mysql.jdbc.Driver class is fine for Connector/J 5.x — if the
            // driver jar is 8.x, switch to com.mysql.cj.jdbc.Driver.
            Properties pro = new Properties();
            pro.setProperty("driver", "com.mysql.jdbc.Driver");
            pro.setProperty("user", "root");
            pro.setProperty("password", "123456");
            result.write().mode(SaveMode.Append).jdbc("jdbc:mysql://192.168.136.200:3306/data_ana?useUnicode=true&characterEncoding=UTF-8", "sex_count", pro);
        } finally {
            // Always release the Spark driver/executor resources, even if the
            // job throws — the original leaked the session on any failure.
            spark.stop();
        }
    }
}
