package com.xx.sparkdemo

import org.apache.spark.sql.{Dataset, Row, SparkSession}

/**
 * Demo: reading JSON lines whose `payload` field has a *varying* type
 * (string / boolean / number) by declaring the payload column as StringType,
 * which keeps the heterogeneous object as raw JSON text, then re-emitting
 * each record as a JSON line.
 *
 * @author tzp
 * @since 2021/8/3
 */
object StrangeJson {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local")
      .appName("StrangeJson")
      .getOrCreate()
    val sc = spark.sparkContext

    import spark.implicits._

    // `linesIterator` instead of `lines`: on Scala 2.13 / JDK 11+, `String.lines`
    // resolves to the JDK method returning java.util.stream.Stream, which has no
    // `toList`. Empty lines (from stripMargin's leading/trailing newlines) are
    // dropped so the JSON reader does not see blank records.
    val data =
      """
        |{"timestamp": "2021-07-30T09:41:51Z", "payload": {"field1": "some text"}}
        |{"timestamp": "2021-07-30T09:41:52Z", "payload": {"field1": true}}
        |{"timestamp": "2021-07-30T09:41:53Z", "payload": {"field1": 123}}
        |""".stripMargin.linesIterator.filter(_.nonEmpty).toList
    val ds = sc.parallelize(data).toDS()

    import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}

    // The `new` keyword is optional here (case-class companion apply).
    // `payload` is read as StringType so the mixed-type JSON object stays raw text.
    val schema = StructType(Array[StructField](
      StructField("timestamp", DataTypes.StringType, false, Metadata.empty),
      StructField("payload", DataTypes.StringType, false, Metadata.empty))
    )

    val df: Dataset[Row] = spark.read.schema(schema).json(ds)

    // BUG FIX: the original format string was missing the closing '}', so every
    // output line was invalid JSON ({"timestamp": "...", "payload": ... ).
    df.map(r => "{\"timestamp\": \"%s\", \"payload\": %s}".format(r.getString(0), r.getString(1)))
      .write.text("file:///Users/tzp/Documents/private/cnm/CodeAccumulate/id-magic/imeigenerator/target/output" + System.currentTimeMillis())
    //    df.write.json("file:///Users/tzp/Documents/private/cnm/CodeAccumulate/id-magic/imeigenerator/target/output.json")

    // Release the local SparkContext so the JVM can exit cleanly.
    spark.stop()
  }
}
