package com.youbu.demo;

import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * Spark SQL data-processing demo backed by a MySQL data source: aggregates
 * integral (points) usage records per user and writes the result as JSON.
 * @author sunfangwei
 **/
public class IntegralCountSqlDemo implements Serializable {

	/**
	 * Entry point. Reads the {@code integral_use_record_tab} table over JDBC,
	 * sums {@code amount} per {@code userId}, orders the totals descending,
	 * and writes the result as JSON under a timestamped subdirectory.
	 *
	 * @param args args[0] = base output directory for the JSON result
	 */
	public static void main(String[] args) {
		// Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 1) {
			System.err.println("Usage: IntegralCountSqlDemo <savePath>");
			System.exit(1);
		}
		// Base directory under which the timestamped result folder is created.
		String savePath = args[0];
		// Initialize the Spark session.
		SparkSession sparkSession = SparkSession.builder().appName("IntegralSql").getOrCreate();
		try {
			// Load the usage records and cache them, since the dataset feeds
			// both the aggregation and (via show/write) multiple actions.
			// SECURITY NOTE(review): connection URL and credentials are hardcoded;
			// move them to configuration/secrets management before production use.
			Dataset<Row> jdbcDF = sparkSession.read().format("jdbc")
					.option("url", "jdbc:mysql://192.168.4.224:3306/spark_demo2?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai")
					.option("dbtable", "integral_use_record_tab")
					.option("user", "root")
					.option("password", "root")
					.load().cache();
			long start = System.currentTimeMillis();
			// Sum amount per user; Spark names the aggregate column "sum(amount)",
			// so rename it to "amountx" before sorting on it.
			Dataset<Row> totals = jdbcDF.select("userId", "amount")
					.groupBy("userId")
					.sum("amount")
					.withColumnRenamed("sum(amount)", "amountx");
			totals = totals.orderBy(totals.col("amountx").desc());
			totals.show();
			// Persist the result; java.time's DateTimeFormatter is thread-safe,
			// unlike the legacy SimpleDateFormat it replaces here.
			String timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyyMMddHHmmss"));
			totals.write().json(savePath + "/" + timestamp);
			long end = System.currentTimeMillis();
			System.out.println("-----OK," + (end - start) / 1000 + "秒");
		} finally {
			// Release cluster resources even if the job fails.
			sparkSession.stop();
		}
	}
}
