package cn.wangjie.spark.sources

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * Reads JSON-formatted text data with Spark SQL in two ways:
 *  1. directly via the built-in JSON data source (schema inferred), and
 *  2. as plain text lines followed by per-field extraction with
 *     `get_json_object` from the functions library.
 */
object SparkSQLJson {

  def main(args: Array[String]): Unit = {

    // Build the SparkSession instance (local mode, 2 threads).
    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getSimpleName.stripSuffix("$"))
      // Limit the number of partitions produced by Spark SQL shuffles
      // (the default of 200 is far too many for a small local dataset).
      .config("spark.sql.shuffle.partitions", "4")
      .master("local[2]")
      .getOrCreate()
    import spark.implicits._

    // Approach 1: read with the built-in JSON data source; Spark infers
    // the schema and decompresses the .gz file transparently.
    val gitDF: DataFrame = spark.read.json("datas/json/2015-03-01-11.json.gz")
    //gitDF.printSchema()
    //gitDF.show(1, truncate = false)

    println("==================================================")

    // Approach 2: read every line as a raw JSON string ...
    val dataset: Dataset[String] = spark.read.textFile("datas/json/2015-03-01-11.json.gz")
    //dataset.printSchema()
    //dataset.show(10, truncate = false)

    // ... then extract the fields id, type, public and created_at with
    // get_json_object. Note: get_json_object always yields StringType
    // columns, regardless of the underlying JSON value type.
    import org.apache.spark.sql.functions._
    val dataframe: DataFrame = dataset.select(
      get_json_object($"value", "$.id").as("id"),
      get_json_object($"value", "$.type").as("type"),
      get_json_object($"value", "$.public").as("public"),
      get_json_object($"value", "$.created_at").as("created_at")
    )
    dataframe.printSchema()
    // Named boolean argument, consistent with the show(...) calls above.
    dataframe.show(10, truncate = false)

    // Release resources when the application finishes.
    spark.stop()
  }

}
