package spark;

import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;

import java.util.HashMap;
import java.util.Map;

import scala.Tuple2;

/**
 * @author zhaoxuan
 * @date 2023-02-16 17:39
 **/
/**
 * Minimal Spark SQL example: reads a JSON file into a temp view, registers a
 * string-returning UDF ({@code ofun}, backed by {@link OperatorFunction}), and
 * runs a grouped aggregation over it, printing the result to stdout.
 */
public class SparkSqlMain {

    /** Input path used when no command-line argument is supplied. */
    private static final String DEFAULT_INPUT_PATH = "D:\\test.json";

    /**
     * Entry point.
     *
     * @param args optional; {@code args[0]} overrides the JSON input path
     *             (defaults to {@value #DEFAULT_INPUT_PATH})
     * @throws AnalysisException if the SQL query fails analysis (e.g. a
     *                           referenced column is missing from the input)
     */
    public static void main(String[] args) throws AnalysisException {
        // Allow the input file to be passed on the command line; fall back to
        // the original hard-coded path for backward compatibility.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT_PATH;

        SparkSession spark = SparkSession
                .builder()
                .appName("Java Spark SQL user-defined Datasets aggregation example")
                .master("local")
                .getOrCreate();
        try {
            Dataset<Row> dataset = spark.read().json(inputPath);
            dataset.createOrReplaceTempView("people");

            // Register the project UDF under the name "ofun"; it returns a String.
            spark.udf().register("ofun", new OperatorFunction(), DataTypes.StringType);

            Dataset<Row> rowDataset = spark.sql(
                    "select max(age) max,min(age) min,name,ofun(name) from  people group by name");
            rowDataset.show();
        } finally {
            // Always release the session's resources (executors, UI, contexts),
            // even if reading or querying fails.
            spark.stop();
        }
    }
}
