package main.scala.exec

import java.io.File

import com.alibaba.fastjson.JSON
import com.typesafe.config.{Config, ConfigFactory}
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * Loads local JSON-formatted data and writes it into a Hive table.
 */
object Json2hiveOffline {
  // Base configuration; can be replaced by an explicit config file via args(0).
  // NOTE(review): `conf` is never read anywhere below — confirm whether the
  // input path or table names were meant to come from it.
  private var conf: Config = ConfigFactory.load("promotion.conf")

  // Default local input file, used when no path is supplied as args(1).
  private val DefaultInputPath =
    "/Users/jackysong/IdeaProjects/spark/readTimeItemcenter/resource/movie.json"

  /**
   * Entry point: reads a local text file whose lines are JSON objects, each
   * carrying a "RECORDS" array, flattens those arrays into a DataFrame,
   * registers it as temp view "test" and runs a projection query over it.
   *
   * @param args optional arguments:
   *             args(0) — path to an alternative config file;
   *             args(1) — input JSON file path (defaults to DefaultInputPath)
   */
  def main(args: Array[String]): Unit = {
    if (args.length >= 1) {
      conf = ConfigFactory.parseFile(new File(args(0)))
    }
    val inputPath = if (args.length >= 2) args(1) else DefaultInputPath

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName("Json2HiveOffline")
      .enableHiveSupport()
      .getOrCreate()

    try {
      import spark.implicits._

      val initDataSet: Dataset[String] = spark.read.textFile(inputPath)
      println(s"input line count: ${initDataSet.count()}")

      // Each line is a JSON object; its "RECORDS" field holds the actual
      // record array. Re-serialize that array so spark.read.json can parse it.
      val records: Dataset[String] =
        initDataSet.map(line => JSON.parseObject(line).getJSONArray("RECORDS").toString())

      val frame1: DataFrame = spark.read.json(records)
      frame1.createOrReplaceTempView("test")

      val frame: DataFrame = spark.sql("select director,movieId,name from test ")
      frame1.dtypes.foreach(println(_))
      frame.show(false)
    } finally {
      // Always release the session, even if the job above fails.
      spark.close()
    }
  }
}
