package com.study.spark.scala.sql

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{LongType, StringType, StructType}

/**
 * Demonstrates Spark SQL's JSON column functions: packing columns into a JSON
 * string with `to_json(struct(...))` and parsing it back with `from_json` and
 * an explicit schema.
 *
 * @author stephen
 * @date 2019-09-27 18:03
 */
object JsonFunctionDemo {

  /**
   * Entry point: builds a small DataFrame, serializes its columns into a
   * single JSON string column, then parses that JSON back into typed columns.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .appName("JsonFunctionDemo")
      .master("local[*]")
      .getOrCreate()

    // Bring in $-string column syntax and the SQL function library.
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Sample rows with one string key and two numeric values.
    val source = spark
      .createDataset(Seq(("aaa", 1, 2), ("bbb", 3, 4), ("ccc", 3, 5), ("bbb", 4, 6)))
      .toDF("key1", "key2", "key3")

    // Serialize all three columns into one JSON-string column.
    val encoded = source.select(to_json(struct($"key1", $"key2", $"key3")).as("json_key"))
    encoded.show(false)

    // Schema used to parse the JSON string back into typed fields.
    // JSON integers are read as LongType even though the source used Int.
    val jsonSchema = new StructType()
      .add("key1", StringType)
      .add("key2", LongType)
      .add("key3", LongType)

    // Parse the JSON column into a struct, then flatten it back to columns.
    val decoded = encoded.select(from_json($"json_key", jsonSchema).as("json_data"))
    decoded.select(
      $"json_data.key1".as("key1"),
      $"json_data.key2".as("key2"),
      $"json_data.key3".as("key3")
    ).show()

    spark.stop()
  }
}
