import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{arrays_zip, col}
import org.example.utils.CommonUtils

/**
 * Demonstrates merging parallel arrays under a `seats` column into a single
 * array-of-structs column (array &amp; struct handling in Spark SQL).
 */
object ArrayAndStruct {
  /**
   * Reads a JSON file whose `seats` column holds parallel arrays
   * (price, seatFlag, seatName, seats) and zips them element-wise into a
   * single `seat_info` column of type array&lt;struct&gt;, then prints the result.
   *
   * @param args optional: args(0) overrides the input JSON path; when absent,
   *             the original hard-coded test-resource path is used so existing
   *             invocations keep working.
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = CommonUtils.getLocalSparkSession()
    try {
      // Input path is now parameterized; the default preserves prior behavior.
      val inputPath = args.headOption.getOrElse(
        "C:\\Users\\hry\\IdeaProjects\\spark-entry-to-mastery\\Spark-introduction-to-actual-combat\\realtime\\src\\test\\scala\\ArrAndStruct.json")
      val tableData: DataFrame = sparkSession.read.json(inputPath)

      // arrays_zip pairs the i-th element of each sub-array into one struct;
      // the cast renames the struct fields to snake_case and fixes their types
      // (price/seat_flag/seat_name as string, seat_num as long) in one step.
      val mergeData = tableData
        .withColumn(
          "seat_info",
          arrays_zip(col("seats.price"), col("seats.seatFlag"), col("seats.seatName"), col("seats.seats"))
            .cast("array<struct<price:string,seat_flag:string,seat_name:string,seat_num:long>>"))
        .drop("seats")

      // mergeData.printSchema()
      mergeData.show(100, false)
      // mergeData.rdd.foreach(println)
    } finally {
      // Always stop the local session so the SparkContext and its threads are released.
      sparkSession.stop()
    }
  }
}
