package com.catmiao.sql;


import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.rdd.RDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Serializable;


/**
 * Conversion between the different SparkSQL data models
 * (DataFrame / Dataset / RDD).
 */
public class Spark05_Model_convert {

    public static void main(String[] args) {

        // The SparkSQL environment object is created via the builder pattern.
        SparkSession sparkSession = SparkSession
                .builder()
                .master("local[*]")
                .appName("SparkSQL")
                .getOrCreate();

        Dataset<Row> dataset = sparkSession.read().json("datas/sql/user.json");

        // DataFrame -> RDD conversion. Kept (though not consumed further) because
        // this demo's purpose is to show the model conversions themselves.
        RDD<Row> rdd = dataset.rdd();

        // TODO DataFrame: access a column positionally through the untyped Row model.
        // The explicit ForeachFunction cast is required: a bare Java lambda is
        // ambiguous between foreach(ForeachFunction<T>) and the Scala
        // foreach(Function1<T, BoxedUnit>) overload and fails to compile.
        // NOTE(review): assumes user.json yields at least 3 columns — row.get(2)
        // would otherwise throw at runtime; verify against the data file.
        dataset.foreach((ForeachFunction<Row>) row -> System.out.println(row.get(2)));

        // TODO Convert the untyped Row model into a typed Dataset<User> via a bean
        // encoder; bean property names must match the JSON column names to resolve.
        Dataset<User> userDs = dataset.as(Encoders.bean(User.class));
        userDs.foreach((ForeachFunction<User>) item -> System.out.println(item));

        sparkSession.close();
    }

}


/**
 * JavaBean backing {@code Encoders.bean(User.class)}; instances must be
 * serializable so Spark can ship them to executors.
 *
 * <p>Implements {@code java.io.Serializable} directly (fully qualified):
 * the previously used {@code scala.Serializable} is deprecated in Scala 2.13+
 * and is merely a trait extending {@code java.io.Serializable}.
 */
class User implements java.io.Serializable {

    private static final long serialVersionUID = 1L;

    private String username;

    // NOTE(review): "aage" looks like a typo for "age", but the bean encoder maps
    // properties by name, so this must match the JSON column in user.json — verify
    // before renaming (the getter/setter names are part of the public API).
    private Integer aage;

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public Integer getAage() {
        return aage;
    }

    public void setAage(Integer aage) {
        this.aage = aage;
    }

    /** Diagnostic representation; format is relied on by the demo's console output. */
    @Override
    public String toString() {
        return "User{" +
                "username='" + username + '\'' +
                ", aage=" + aage +
                '}';
    }
}