package com.offcn.bigdata.sql.p1

import com.offcn.bigdata.sql.domain.Person
import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.apache.spark.sql.types._

import scala.collection.JavaConversions

/**
  * Construction of the SparkSQL programming model:
  *     DataFrame vs. Dataset
  */
object _02ProgramModelOps {
    /**
      * Demonstrates two ways to construct a DataFrame (bean-class reflection and
      * dynamic schema programming) and how to construct a Dataset from a case class.
      */
    def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
                // fixed: appName previously said "_01SparkSQLOps" (copy-paste from another demo)
                .appName("_02ProgramModelOps")
                .master("local[*]")
                .getOrCreate()

        // Explicit Scala <-> Java converters (.asJava / .asScala).
        // Replaces the deprecated implicit JavaConversions wildcard import,
        // whose silent conversions are a known anti-pattern.
        import scala.collection.JavaConverters._

        /*
            DataFrame construction:
                1) reflection on a Java bean class
                2) dynamic programming with an explicit schema
         */
        val persons = List(
            new Person("张皓", 15, 169.5),
            new Person("冯凡", 25, 179.5),
            new Person("单松", 12, 109.5),
            new Person("林博", 25, 139.5)
        )

        // Way 1: reflection — column names/types are inferred from the Person bean.
        // createDataFrame(list, beanClass) expects a java.util.List, hence .asJava.
        val beanDF = spark.createDataFrame(persons.asJava, classOf[Person])
        beanDF.show()

        println("-----------------动态编程的方式-----------------------")
        // Way 2: dynamic programming — declare the schema explicitly and build
        // Rows by hand; useful when the structure is only known at runtime.
        val schema = StructType(Array(
            StructField("name", DataTypes.StringType, false),
            StructField("age", DataTypes.IntegerType, false),
            StructField("height", DataTypes.DoubleType, false)
        ))
        val rows = persons.map(person => Row(person.getName, person.getAge, person.getHeight))
        val rowDF = spark.createDataFrame(rows.asJava, schema)
        rowDF.show()

        /*
            Dataset:
            an implicit Encoder is needed to store Person instances in a Dataset.
            Primitive types (Int, String, etc) and Product types (case classes) are
            supported by importing spark.implicits._
            Building a Dataset needs a case class plus the implicit encoders.
         */
        println("-----------------dataset-----------------------")
        import spark.implicits._
        val ds: Dataset[People] = spark.createDataset(List(
            People("张皓", 15, 169.5),
            People("冯凡", 25, 179.5),
            People("单松", 12, 109.5),
            People("林博", 25, 139.5)
        ))
        ds.show()

        spark.stop()
    }
}
/** Immutable record used to build the Dataset example; `final` so it cannot be
  * extended (extending case classes breaks equals/hashCode semantics). */
final case class People(name: String, age: Int, height: Double)
