package com.czk.java;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.types.DataTypes;

import java.io.Serializable;
import java.util.Objects;

/**
 * Demonstrates reading a MySQL table with Spark SQL's JDBC data source.
 *
 * @author ChenZhangKun
 * @since 2021-12-14
 */
public class JavaSparkJdbc {

    /**
     * Reads the {@code user} table from a local MySQL database via Spark's JDBC
     * data source, maps the rows onto {@link Person} beans, then registers the
     * DataFrame as a temp view plus a string-prefixing UDF and queries it with
     * Spark SQL.
     *
     * @param args command-line arguments (unused)
     * @throws AnalysisException if the SQL query cannot be analyzed
     */
    public static void main(String[] args) throws AnalysisException {
        // SparkSession is AutoCloseable; try-with-resources guarantees the
        // session is stopped even if one of the actions below throws.
        try (SparkSession sparkSession = SparkSession.builder()
                .appName("java")
                .master("local[*]")
                .getOrCreate()) {
            sparkSession.sparkContext().setLogLevel("WARN");

            // NOTE(review): credentials are hard-coded; move them to config or
            // environment variables before using this outside local testing.
            // NOTE(review): "com.mysql.jdbc.Driver" is the legacy driver class;
            // MySQL Connector/J 8+ uses "com.mysql.cj.jdbc.Driver" — confirm
            // which connector is on the classpath.
            Dataset<Row> df = sparkSession.read()
                    .format("jdbc")
                    .option("url", "jdbc:mysql://localhost:3306/test")
                    .option("driver", "com.mysql.jdbc.Driver")
                    .option("user", "root")
                    .option("password", "1234")
                    .option("dbtable", "user")
                    .load();
            df.show();
            df.printSchema();

            // Map rows onto the Person bean (columns matched by getter/setter names).
            Dataset<Person> dataset = df.as(Encoders.bean(Person.class));
            dataset.foreach(person -> System.out.println(person));

            df.createOrReplaceTempView("user");
            // Register a UDF that prefixes a string column with "Name:".
            sparkSession.udf().register("prefix", (UDF1<String, String>) x -> "Name:" + x, DataTypes.StringType);
            sparkSession.sql("select prefix(name) from user where id >200").show();
        }
    }

    /**
     * JavaBean mirroring the {@code user} table (columns {@code name} and
     * {@code id}). It must be a public static class with a no-arg constructor
     * and getters/setters so {@link Encoders#bean} can map rows onto it, and
     * {@link Serializable} because Spark ships instances to executors.
     */
    public static class Person implements Serializable {
        private static final long serialVersionUID = 1L;

        private String name;
        private long id;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public long getId() {
            return id;
        }

        public void setId(long id) {
            this.id = id;
        }

        /** Value equality over both fields, consistent with {@link #hashCode()}. */
        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof Person)) {
                return false;
            }
            Person other = (Person) o;
            return id == other.id && Objects.equals(name, other.name);
        }

        @Override
        public int hashCode() {
            return Objects.hash(name, id);
        }

        @Override
        public String toString() {
            return "Person{" +
                    "name='" + name + '\'' +
                    ", id=" + id +
                    '}';
        }
    }
}
