package demo

import org.apache.spark.sql.SparkSession
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

/**
 * Demo entry point: deserializes a hard-coded JSON array into
 * `MiddleSchoolClass` objects with Jackson, then displays them
 * as a Spark Dataset.
 */
object DemoMain {
  // Windows-only workaround so Hadoop can find winutils.exe.
  // NOTE(review): hard-coded path — harmless elsewhere, but confirm it
  // matches the local Hadoop install when running on Windows.
  System.setProperty("hadoop.home.dir", "C:\\hadoop-2.8.1")

  // Local single-JVM session; initialized lazily on first access to DemoMain.
  val spark: SparkSession = SparkSession.builder.appName("Demo").master("local").getOrCreate()

  def main(args: Array[String]): Unit = {
    // Sample payload: two classes, each with a head teacher and students.
    // Some student records deliberately omit fields (age / phone_num) to
    // exercise optional-field handling during deserialization.
    val jsonStr =
      """[
        |	{
        |		"class": "高二（1）班",
        |		"head_teacher": {
        |			"name": "Mr.Wang",
        |			"education": "硕士"
        |		},
        |		"score": 80,
        |		"students": [
        |			{
        |				"name": "zhangsan",
        |				"age": 15,
        |				"score": 50
        |			},
        |			{
        |				"name": "lisi",
        |				"age": 16,
        |				"phone_num": "123456789",
        |				"score": 60
        |			}
        |		]
        |	},
        |	{
        |		"class": "高二（2）班",
        |		"head_teacher": {
        |			"name": "Mr.Li",
        |			"education": "博士"
        |		},
        |		"score": 100,
        |		"students": [
        |			{
        |				"name": "wangwu",
        |				"age": 14,
        |				"phone_num": "987654321",
        |				"score": 70
        |			},
        |			{
        |				"name": "zhaoliu",
        |				"phone_num": "147258369",
        |				"score": 80
        |			}
        |		]
        |	}
        |]""".stripMargin

    try {
      // Jackson mapper with the Scala module so case classes / Options map cleanly.
      val mapper = new ObjectMapper()
      mapper.registerModule(DefaultScalaModule)

      // JSON -> typed object array.
      val classes = mapper.readValue(jsonStr, classOf[Array[MiddleSchoolClass]])

      // Build the Dataset directly from the local collection; no need for the
      // intermediate RDD that parallelize(...) would create.
      import spark.implicits._
      val ds = spark.createDataset(classes)
      ds.show(false) // truncate = false: keep full column contents visible
    } finally {
      // Always release the SparkContext so the JVM can exit cleanly.
      spark.stop()
    }
  }
}
