package com.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.expressions.UserDefinedAggregateFunction;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Demonstrates registering and using a custom scalar UDF with Spark SQL
 * (Spark 1.x {@code SQLContext}/{@code DataFrame} API).
 *
 * <p>Builds a small in-memory "person" table of (username, age) rows,
 * registers a {@code nameLen} UDF that returns the length of a string,
 * and runs a query that applies it.
 */
public class UDFTest {
    public static void main(String[] args) {

        SparkConf conf = new SparkConf();
        conf.setMaster("local").setAppName("udf");

        JavaSparkContext sc = new JavaSparkContext(conf);
        SQLContext sqlContext = new SQLContext(sc);

        // Sample (name, age) pairs; parameterize Tuple2 to avoid raw types.
        JavaRDD<Tuple2<String, Integer>> rdd = sc.parallelize(Arrays.asList(
                new Tuple2<>("zhangsan", 18),
                new Tuple2<>("zhaoliu", 18),
                new Tuple2<>("lisi", 20)
        ));

        // Convert each tuple into a Row matching the schema defined below.
        JavaRDD<Row> rowRdd = rdd.map(new Function<Tuple2<String, Integer>, Row>() {
            @Override
            public Row call(Tuple2<String, Integer> tuple) throws Exception {
                return RowFactory.create(tuple._1, tuple._2);
            }
        });

        // Schema: username (string), age (int); both columns nullable.
        List<StructField> fields = new ArrayList<>();
        fields.add(DataTypes.createStructField("username", DataTypes.StringType, true));
        fields.add(DataTypes.createStructField("age", DataTypes.IntegerType, true));
        StructType schema = DataTypes.createStructType(fields);

        DataFrame dataFrame = sqlContext.createDataFrame(rowRdd, schema);
        dataFrame.registerTempTable("person");

        // Register a one-argument UDF (UDF1); UDF2, UDF3, ... exist for more arguments.
        sqlContext.udf().register("nameLen", new UDF1<String, Integer>() {
            @Override
            public Integer call(String s) throws Exception {
                // The username column is nullable — guard against null input
                // instead of throwing NullPointerException.
                return s == null ? null : s.length();
            }
        }, DataTypes.IntegerType);

        // Bug fix: the alias belongs on the UDF result, not on the raw
        // username column (originally "username as nameLen").
        DataFrame sql = sqlContext.sql(
                "select nameLen(username) as nameLen, username from person where age=20");

        sql.show();

        sc.close();
    }
}
