package com.shujia.indicators.core;

import com.alibaba.fastjson.JSON;
import com.shujia.indicators.utils.SparkUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.stream.Collectors;

public class MemberEtl {

    // Shared JDBC settings for the target MySQL database.
    // NOTE(review): credentials are hard-coded in source — move to a config file or secret store.
    private static final String JDBC_URL =
            "jdbc:mysql://localhost:3306/bigdata005?useSSL=false&useUnicode=true&characterEncoding=UTF-8";
    private static final String JDBC_USER = "bigdata005";
    private static final String JDBC_PASSWORD = "ShuJia666777...";
    private static final String JDBC_DRIVER = "com.mysql.cj.jdbc.Driver";

    public static void main(String[] args) {
        SparkSession sparkSession = SparkUtils.initSession();

        // Run each member-dimension aggregation and append the result to MySQL.
        memberSexEtl(sparkSession);
        memberChannelEtl(sparkSession);
        memberHeatEtl(sparkSession);
        memberMpSubEtl(sparkSession);
    }

    /**
     * Counts members grouped by sex (-1 = 未知/unknown, 1 = 男/male, 2 = 女/female)
     * and appends the result to the {@code memberSex} table.
     */
    static void memberSexEtl(SparkSession sparkSession) {
        Dataset<Row> dataset = sparkSession.sql(
                "select case sex when -1 then '未知' when 1 then '男' when 2 then '女' end as sex, " +
                "count(id) as sexCount " +
                "from bigdata005_user_portraits_dim.dim_t_member group by sex");
        writeToMysql(dataset, "memberSex");
    }

    /**
     * Counts members grouped by registration channel and appends the result
     * to the {@code memberChannel} table.
     */
    static void memberChannelEtl(SparkSession sparkSession) {
        Dataset<Row> dataset = sparkSession.sql(
                "select member_channel as memberChannel, count(id) as channelCount " +
                "from bigdata005_user_portraits_dim.dim_t_member group by member_channel");
        writeToMysql(dataset, "memberChannel");
    }

    /**
     * Builds the member "heat" (engagement funnel) indicator: registered vs
     * completed profiles, one-time vs repeat orderers, and coupon users,
     * cross-joined into a single-row result and appended to {@code memberHeat}.
     */
    static void memberHeatEtl(SparkSession sparkSession) {
        // Members whose phone is the literal string 'null' are registered-only;
        // a real phone value means the profile is complete.
        // BUG FIX: the original concatenation produced "completefrom" (missing space).
        Dataset<Row> regCompleteCount = sparkSession.sql(
                "select count(if(phone = 'null', true, null)) as reg, " +
                "count(if(phone != 'null', true, null)) as complete " +
                "from bigdata005_user_portraits_dim.dim_t_member");

        // BUG FIX: `order` is a reserved word and must be back-quoted; removed the
        // trailing comma before FROM; the subquery referenced as t had no alias.
        // NOTE(review): counting order_id from t_member looks like it should read an
        // order fact table — confirm the source table name.
        Dataset<Row> orderOrderAgainCount = sparkSession.sql(
                "select count(if(t.orderCount = 1, true, null)) as `order`, " +
                "count(if(t.orderCount > 1, true, null)) as orderAgain " +
                "from (select count(order_id) as orderCount, member_id " +
                "from bigdata005_user_portraits_dim.t_member group by member_id) t");

        // BUG FIX: removed a stray ')' that made the statement unparseable.
        Dataset<Row> couponCount = sparkSession.sql(
                "select count(distinct member_id) as coupon " +
                "from bigdata005_user_portraits_dim.dim_t_coupon_member");

        // Each sub-query yields one row, so the cross joins produce one combined row.
        Dataset<Row> dataset = regCompleteCount
                .crossJoin(orderOrderAgainCount)
                .crossJoin(couponCount);

        // BUG FIX: was writing to "memberChannel" (copy-paste), clobbering the
        // channel indicator table with an incompatible schema.
        writeToMysql(dataset, "memberHeat");
    }

    /**
     * Counts members subscribed vs unsubscribed to the mini-program
     * (mp_open_id present vs the literal string 'null') and appends the
     * result to the {@code memberMpSub} table.
     */
    static void memberMpSubEtl(SparkSession sparkSession) {
        Dataset<Row> dataset = sparkSession.sql(
                "select count(if(mp_open_id != 'null', true, null)) as subCount, " +
                "count(if(mp_open_id = 'null', true, null)) as unSubCount " +
                "from bigdata005_user_portraits_dim.dim_t_member");
        // BUG FIX: was writing to "memberChannel" (copy-paste); use its own table.
        writeToMysql(dataset, "memberMpSub");
    }

    /**
     * Appends {@code dataset} to the given MySQL table over JDBC using the
     * shared connection settings. Extracted to remove four identical stanzas.
     *
     * @param dataset rows to write
     * @param table   target MySQL table name
     */
    private static void writeToMysql(Dataset<Row> dataset, String table) {
        dataset.write().format("jdbc")
                .mode(SaveMode.Append)
                .option("url", JDBC_URL)
                .option("dbtable", table)
                .option("user", JDBC_USER)
                .option("password", JDBC_PASSWORD)
                .option("driver", JDBC_DRIVER)
                .save();
    }
}
