package com.ymy

import org.apache.spark.api.java.function.FlatMapFunction
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

object AppTest {

  /**
   * Entry point: reads a nested JSON file, flattens the deeply nested
   * `raw_data.members.transactions.calls` array-of-arrays via two explode
   * steps, and shows the call records newer than a cutoff date.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("AppTest")
      .master("local[*]")
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // NOTE(review): path is hard-coded and relative to the working directory —
    // assumes 123.json sits next to the launch dir; confirm before deploying.
    val df = spark.read.json("123.json")

    df.createOrReplaceTempView("table")
    // First explode unwraps the outer array; explode's default output column
    // is named `col`. (Fixed: original query had no space before FROM —
    // "...calls)from table" — which is fragile and unreadable.)
    val df2 = spark.sql("select explode(raw_data.members.transactions.calls) from table")
    // Second explode unwraps the inner array, yielding one row per call struct.
    val df3 = df2.select(explode($"col"))
    // Project the individual fields out of the call struct.
    val df4 = df3.select(
      $"col.call_type", $"col.init_type", $"col.net_type",
      $"col.other_cell_phone", $"col.place", $"col.start_time",
      $"col.subflow", $"col.subtotal", $"col.update_time", $"col.use_time")

    df4.createOrReplaceTempView("info")

    // Lexicographic string comparison — assumes start_time is ISO-formatted
    // (yyyy-MM-dd...); TODO confirm against the actual data.
    spark.sql("select * from info where start_time > '2018-12-17'").show(false)

    spark.stop()
  }

  /**
   * Small RDD smoke test: parallelizes a few ints, marks the RDD for caching,
   * and prints each element from the executors.
   *
   * NOTE(review): creates its own SparkContext, so it must not run while the
   * SparkSession in `main` is active (one context per JVM by default).
   */
  def test(): Unit = {

    val sparkConf = new SparkConf().setAppName("AppTest").setMaster("local[2]")
    val sc = new SparkContext(sparkConf)

    try {
      // cache() returns the same RDD (marked for MEMORY_ONLY persistence),
      // so chain it directly instead of binding a redundant second val.
      val rdd = sc.parallelize(Array(1, 2, 3)).cache()

      // println runs on the executors; with local[*] output appears in this
      // process's stdout, but it would not on a real cluster.
      rdd.foreach(println)
    } finally {
      // Always release the context, even if the job above fails.
      sc.stop()
    }
  }
}
