package spark_sql;

import bean.User;
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * @author shihb
 * @date 2020/1/10 18:13
 */
/**
 * Demonstrates the conversions between the three Spark data abstractions:
 * {@code JavaRDD<T>}, {@code Dataset<T>} (typed) and {@code Dataset<Row>} (DataFrame).
 *
 * <p>NOTE: schemas derived from a JavaBean ({@code Encoders.bean} /
 * {@code createDataFrame(rdd, User.class)}) order columns <em>alphabetically</em>
 * by property name, not by declaration order — so a {@code User(id, name, age)}
 * bean yields columns (age, id, name).
 */
public class DataTransform {

  public static void main(String[] args) {
    // Local deployment mode; give the app a real name so it is identifiable in the Spark UI.
    SparkConf sparkConf = new SparkConf().setMaster("local[*]").setAppName("DataTransform");
    // Build the SparkSession through the supported builder API instead of the
    // package-private-style constructor; derive the JavaSparkContext from it so
    // both handles share the same underlying SparkContext.
    SparkSession spark = SparkSession.builder().config(sparkConf).getOrCreate();
    JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());

    try {
      // Create a source RDD of JavaBean instances (single partition for determinism).
      JavaRDD<User> rdd = jsc.parallelize(Arrays.asList(
          new User(1, "zhangsan", 20),
          new User(2, "lisi", 30),
          new User(3, "wangwu", 40)
          ), 1);

      // 1. RDD -> Dataset
      // 1.1 JavaRDD<User> -> typed Dataset<User> via a bean encoder.
      Encoder<User> userEncoder = Encoders.bean(User.class);
      Dataset<User> dataset = spark.createDataset(rdd.rdd(), userEncoder);
      // 1.2 JavaRDD<User> -> untyped Dataset<Row> (DataFrame) via bean reflection.
      Dataset<Row> dataFrame = spark.createDataFrame(rdd, User.class);

      // 2. Dataset <-> DataFrame
      // 2.1 Dataset<Row> -> Dataset<User>: re-attach the type with the encoder.
      Dataset<User> dataset1 = dataFrame.as(userEncoder);
      // 2.2 Dataset<User> -> Dataset<Row>: drop the type.
      Dataset<Row> dataFrame1 = dataset.toDF();

      // 3. Dataset -> RDD
      // 3.1 Typed Dataset<User> -> JavaRDD<User>.
      JavaRDD<User> rdd1 = dataset.toJavaRDD();
      // 3.2 DataFrame -> JavaRDD<Row>. Beware: row fields follow the alphabetical
      // bean-derived column order documented on the class (age, id, name).
      JavaRDD<Row> rdd2 = dataFrame.toJavaRDD();

      // Query the typed dataset through SQL via a temporary view.
      dataset.createOrReplaceTempView("user");
      Dataset<Row> result = spark.sql("select * from user");
      result.show();
    } finally {
      // Release the SparkContext even if the demo above throws.
      spark.stop();
    }
  }

}

