import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

import java.util

object SparkSQLTest {
  /**
   * Demo entry point: builds a small DataFrame, then rebuilds it with every
   * field nulled out while preserving the original schema, and prints both
   * the schema and the rows.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Create or obtain the SparkSession (local mode, schema merging enabled for parquet)
    val spark = SparkSession.builder()
      .appName("SparkSQLDataSource")
      .config("spark.sql.parquet.mergeSchema", true)
      .master("local[*]")
      .getOrCreate()

    // Implicit encoders required by createDataset / toDF
    import spark.implicits._

    spark.sparkContext.setLogLevel("WARN")

    // Sample data: name, age, plus two columns that may be null
    val studentList = List(
      ("jock", "22", null, null),
      ("lucy", "20", null, null),
      ("lucy1", "20", "xx", "xx")
    )
    val valueDS: Dataset[(String, String, String, String)] = spark.createDataset(studentList)
    val frame: DataFrame = valueDS.toDF("name", "age", "test1", "test2")
    val schema: StructType = frame.schema

    // Map every row to a row of all-null fields. Using a native Scala Seq
    // sized from the schema avoids the deprecated JavaConversions implicit
    // that was previously needed to pass a java.util.List to Row.fromSeq,
    // and stays correct if the number of columns ever changes.
    val nullRows: RDD[Row] = frame.rdd.map { _ =>
      Row.fromSeq(Seq.fill[String](schema.length)(null))
    }

    val dataFrame: DataFrame = spark.createDataFrame(nullRows, schema)
    dataFrame.printSchema()
    dataFrame.show(10, false)

    spark.stop()
  }
}
