package com.zzl.spark.sql.demo;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.util.*;

/**
 * Spark SQL demo: computes the total sale amount per day from an in-memory
 * log of {@code date,amount,userId} lines, first via the DataFrame
 * {@code groupBy/agg} API and then via an equivalent SQL query.
 *
 * <p>Runs in local mode; all output goes to the console via {@code show()}.
 */
public class DailySale {

    public static void main(String[] args) {
        // Must be set BEFORE any Spark/Hadoop class initializes, otherwise the
        // winutils.exe lookup on Windows will not see it.
        System.setProperty("hadoop.home.dir", "E:\\hadoop");

        SparkConf conf = new SparkConf().setMaster("local").setAppName("DailySale");
        JavaSparkContext sc = new JavaSparkContext(conf);
        SQLContext sqlContext = new SQLContext(sc);

        // Each record is "date,saleAmount,userId". The fourth entry is
        // deliberately malformed to exercise the filter below.
        List<String> userSaleLog = Arrays.asList(
                "2016-9-01,55,1122",
                "2016-9-02,56,1122",
                "2016-9-01,55,1133",
                "2016-9-02,",
                "2016-9-02,56,1144",
                "2016-9-03,78,1155",
                "2016-9-04,113,1123"
        );
        JavaRDD<String> parallelize = sc.parallelize(userSaleLog);

        // Drop malformed records: a valid line splits into exactly 3 fields.
        JavaRDD<String> filterUserLogRdd = parallelize.filter((s) -> s.split(",").length == 3);

        // Keep only (date, saleAmount); the user id is not needed for the aggregation.
        JavaRDD<Row> userSaleLogRowRDD = filterUserLogRdd.map((line) -> {
            String[] parts = line.split(",");
            return RowFactory.create(parts[0], Integer.valueOf(parts[1]));
        });

        // Explicit schema matching the Row layout above.
        List<StructField> structFields = new ArrayList<>();
        structFields.add(DataTypes.createStructField("date", DataTypes.StringType, true));
        structFields.add(DataTypes.createStructField("sale_amount", DataTypes.IntegerType, true));
        StructType structType = DataTypes.createStructType(structFields);

        Dataset<Row> dataFrame = sqlContext.createDataFrame(userSaleLogRowRDD, structType);

        dataFrame.createOrReplaceTempView("sale");
        dataFrame.show();

        // DataFrame API: sum sale_amount per date.
        Map<String, String> expr = new HashMap<>();
        expr.put("sale_amount", "sum");
        dataFrame.groupBy(dataFrame.col("date")).agg(expr).show();

        // Equivalent SQL over the temp view registered above.
        String sql = "select date,sum(sale_amount) count from sale group by date";
        sqlContext.sql(sql).show();

        sc.stop();
    }
}
