package com.shujia.indicators.core;

import com.shujia.indicators.utils.SparkUtils;

// NOTE(review): javafx BorderRepeat appears unused in this file — confirm and remove.
import javafx.scene.layout.BorderRepeat;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Calendar;
import java.util.Date;

/**
 * Batch ETL that compares week-over-week daily counts of new member
 * registrations and new orders.
 *
 * <p>For a fixed run date it issues two Spark SQL aggregations per metric:
 * one for the 7 days immediately before the run date (open-ended upper bound,
 * matching the original behaviour) and one for the 7 days before that. The
 * two daily result sets are unioned and appended to a MySQL reporting table
 * via JDBC.
 */
public class WeekCompareEtl {

    /** Day pattern shared by the SQL filters; thread-safe, unlike SimpleDateFormat. */
    private static final DateTimeFormatter DAY_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    // JDBC sink settings shared by both reporting tables.
    // NOTE(review): credentials are hard-coded; move to configuration/secret storage.
    private static final String JDBC_URL =
            "jdbc:mysql://localhost:3306/bigdata005?useSSL=false&useUnicode=true&characterEncoding=UTF-8";
    private static final String JDBC_USER = "bigdata005";
    private static final String JDBC_PASSWORD = "ShuJia666777...";
    private static final String JDBC_DRIVER = "com.mysql.cj.jdbc.Driver";

    public static void main(String[] args) {
        SparkSession sparkSession = SparkUtils.initSession();

        // Fixed batch reference date (original hard-coded value kept for reproducibility).
        LocalDate runDate = LocalDate.of(2019, 11, 30);

        // java.time replaces the Date/Calendar/SimpleDateFormat juggling of the
        // original implementation; minusDays at start-of-day formats identically.
        String weekStart = runDate.minusDays(7).format(DAY_FORMAT);
        String prevWeekStart = runDate.minusDays(14).format(DAY_FORMAT);

        // New member registrations per day: current week (no upper bound, as in
        // the original) unioned with the previous complete week.
        writeWeekCompare(sparkSession,
                dailyCountSql("count(id)", "newRegCount",
                        "bigdata005_user_portraits_dim.dim_t_member", weekStart, null),
                dailyCountSql("count(id)", "newRegCount",
                        "bigdata005_user_portraits_dim.dim_t_member", prevWeekStart, weekStart),
                "week_compare_reg");

        // New orders per day, over the same two windows.
        writeWeekCompare(sparkSession,
                dailyCountSql("count(order_id)", "newOrderCount",
                        "bigdata005_user_portraits_dwd.dwd_t_order", weekStart, null),
                dailyCountSql("count(order_id)", "newOrderCount",
                        "bigdata005_user_portraits_dwd.dwd_t_order", prevWeekStart, weekStart),
                "week_compare_order");
    }

    /**
     * Builds the per-day aggregation SQL.
     *
     * <p>The bound values are produced internally by {@link #DAY_FORMAT}, so the
     * string concatenation cannot be influenced by external input.
     *
     * @param countExpr aggregate expression, e.g. {@code count(id)}
     * @param alias     column alias for the aggregate
     * @param table     fully-qualified source table
     * @param fromDay   inclusive lower bound ({@code yyyy-MM-dd})
     * @param toDay     exclusive upper bound, or {@code null} for no upper bound
     * @return the SQL text
     */
    private static String dailyCountSql(String countExpr, String alias, String table,
                                        String fromDay, String toDay) {
        StringBuilder sql = new StringBuilder()
                .append("select date_format(create_time,'yyyy-MM-dd') as day, ")
                .append(countExpr).append(" as ").append(alias)
                .append(" from ").append(table)
                .append(" where create_time >= '").append(fromDay).append("'");
        if (toDay != null) {
            sql.append(" and create_time < '").append(toDay).append("'");
        }
        sql.append(" group by date_format(create_time,'yyyy-MM-dd')");
        return sql.toString();
    }

    /**
     * Runs both weekly queries, unions the daily rows and appends them to the
     * given MySQL table over JDBC.
     *
     * @param spark       active session
     * @param currentSql  query for the most recent week
     * @param previousSql query for the week before it
     * @param table       JDBC target table name
     */
    private static void writeWeekCompare(SparkSession spark, String currentSql,
                                         String previousSql, String table) {
        Dataset<Row> current = spark.sql(currentSql);
        Dataset<Row> previous = spark.sql(previousSql);
        current.union(previous).write().format("jdbc")
                .mode(SaveMode.Append)
                .option("url", JDBC_URL)
                .option("dbtable", table)
                .option("user", JDBC_USER)
                .option("password", JDBC_PASSWORD)
                .option("driver", JDBC_DRIVER)
                .save();
    }
}
