package com.loyalove.demo.spark.sql;

import static org.apache.spark.sql.functions.avg;
import static org.apache.spark.sql.functions.max;
import static org.apache.spark.sql.functions.min;
import static org.apache.spark.sql.functions.sum;
import static org.apache.spark.sql.functions.col;

import java.util.Properties;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class SparkSql {

  /**
   * Demo job: reads the {@code meter_data_history} table from a MySQL database
   * over JDBC, filters rows to a single collection date, and prints per-meter
   * flow aggregates (sum, average, max, min) to stdout.
   *
   * @param args unused
   */
  public static void main(String[] args) {
    SparkSession spark = SparkSession
        .builder()
        .master("local[*]")
        .appName("jdbc")
        .getOrCreate();

    try {
      String url = "jdbc:mysql://rm-wz96lod4quo919vsp2o.mysql.rds.aliyuncs.com:3306/wateriot";
      // SECURITY: credentials are hard-coded in source. Move them to environment
      // variables or a secrets store before using this outside a demo.
      Properties properties = new Properties();
      properties.setProperty("driver", "com.mysql.jdbc.Driver");
      properties.setProperty("user", "water");
      properties.setProperty("password", "Water@2018");
      properties.setProperty("useSSL", "false");

      // Full-table read of the meter history; Spark pushes the filter below
      // down to JDBC where possible.
      Dataset<Row> meterHistory = spark.read()
          .jdbc(url, "meter_data_history", properties);

      meterHistory.printSchema();

      // equalTo(...) is the Java-friendly alias for the Scala === operator
      // ($eq$eq$eq); identical semantics, readable from Java.
      Dataset<Row> todayData = meterHistory
          .filter(col("collect_date").equalTo("2018-07-01"));

      // createOrReplaceTempView replaces the deprecated registerTempTable.
      todayData.createOrReplaceTempView("today");

      Dataset<Row> dataSet = spark
          .sql("select meter_no as no, collect_time as time, fperiod_flow as flow from today");

      Dataset<Row> agg = dataSet.groupBy("no").agg(
          sum("flow").alias("sumFlow"),
          avg("flow").alias("avgFlow"),
          max("flow").alias("maxFlow"),
          min("flow").alias("minFlow")
      );

      agg.show();
    } finally {
      // Release the local Spark context even if a stage above throws.
      spark.stop();
    }
  }
}
