package com.navinfo.platform.examples.parquet

import com.navinfo.platform.examples.carbondata.{ComplexTypeData, StructElement}
import com.navinfo.platform.qingqi.protocol.common.{GsonUtil, InterEventData}
import org.apache.spark.sql.{SaveMode, SparkSession}
import com.navinfo.platform.qingqi.protocol.java.EventDataPb

// Nested struct element used as the `file` column of ComplexTypeData.
// NOTE(review): an Array field makes case-class == use reference equality for
// that field — fine as a Spark schema carrier, but don't compare instances by ==.
// Shadows the same-named import from the carbondata package (L3) — confirm intended.
case class StructElement(school: Array[String], age: Int)
// Row type for the parquet write/read demo: flat columns plus one nested
// struct column (`file`). Shadows the same-named import from the carbondata
// package (L3) — confirm intended.
case class ComplexTypeData(id: Int, name: String, city: String, salary: Float, file: StructElement)

// Flattened view of an EventDataPb.EventData protobuf message, so Spark can
// derive a DataFrame schema from plain fields instead of the generated PB class.
case class PBComplexTypeData(startGpsTime: Long,stopGpsTime: Long,event: String, duration: Long)

/**
 * Spark example: writes a DataFrame with a nested struct column to parquet,
 * partitioned by `name`, then reads it back. Also demonstrates flattening a
 * protobuf `EventData` message into a case class to build a DataFrame.
 *
 * Runs locally (`local[4]`); intended as a runnable sample, not production code.
 */
object MyParquetSample {

  // Single source of truth for the output location — previously the path
  // literal was duplicated at the write and read sites and could drift apart.
  private val ParquetPath = "/user/root/namesPartByColor.parquet"

  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .master("local[4]")
      .appName("MyParquetSample")
      .getOrCreate()

    import spark.implicits._

    val sc = spark.sparkContext

    // Generate a small dataset with a nested struct column (`file`).
    val df = sc.parallelize(Seq(
      ComplexTypeData(1, "index_1", "city_1", 10000.0f,
        StructElement(Array("struct_11", "struct_12"), 10)),
      ComplexTypeData(2, "index_2", "city_2", 20000.0f,
        StructElement(Array("struct_21", "struct_22"), 20)),
      ComplexTypeData(3, "index_3", "city_3", 30000.0f,
        StructElement(Array("struct_31", "struct_32"), 30))
    )).toDF()

    df.printSchema()
    df.show()

    // Write partitioned by `name`; Overwrite replaces any previous run's output.
    df.write.partitionBy("name").mode(SaveMode.Overwrite).format("parquet").save(ParquetPath)

    // Build the protobuf message once and reuse it — the original built the
    // identical message twice (once for printing, once for the DataFrame).
    val event = EventDataPb.EventData.newBuilder()
      .setDuration(1000L)
      .setEvent(EventDataPb.EventType.brake)
      .setStartGpsTime(1L)
      .setStopGpsTime(2L)
      .build()

    println("--------------------------")
    println(GsonUtil.gsonString(event))
    println("--------------------------")

    // Flatten the protobuf message into a case class so Spark can derive a schema.
    val pbDf = sc.parallelize(Seq(
      PBComplexTypeData(event.getStartGpsTime, event.getStopGpsTime, event.getEvent.toString, event.getDuration)
    )).toDF()

    pbDf.printSchema()
    pbDf.show()

    //---------------------------------read--------------------------------------
    val parquetFileDF = spark.read.parquet(ParquetPath)

    parquetFileDF.printSchema()

    parquetFileDF.select($"name", $"salary" + 1).show()

    // Release Spark resources — the original never stopped the session.
    spark.stop()
  }

}
