package com.json


import java.io.File

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import com.a.aaaa
import cn.hutool.json.{JSONObject, JSONUtil}
import org.apache.spark.rdd.RDD


object ss {

  /** Reads every JSON file in a directory, extracts the "cxjgxx" element from
    * each file, and prints it as a Spark DataFrame.
    *
    * The input directory may be supplied as the first command-line argument;
    * when absent, the original hard-coded path is used (backward compatible).
    *
    * Fixes vs. the previous version:
    *  - guards against `File.listFiles()` returning null (missing/non-dir path)
    *  - removes the unused `length` local
    *  - uses the `Dataset[String]` overload of `spark.read.json` instead of the
    *    RDD overload, which is deprecated since Spark 2.2
    *  - stops the SparkSession in a `finally` block
    */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("SparkSample")
      .master("local[*]")
      .getOrCreate()
    // Needed once for createDataset's Encoder[String]; no reason to re-import per file.
    import spark.implicits._

    val path: String = args.headOption
      .getOrElse("C:\\Users\\Raichard\\Desktop\\新建文件夹\\T_YHBZLXGLXT_JG_QYDWXX\\")

    try {
      // listFiles() returns null when the path does not exist or is not a directory.
      val files: Array[File] = Option(new File(path).listFiles()).getOrElse(Array.empty)
      for (file <- files) {
        // aaaa.getStr presumably returns the file's contents as a string — TODO confirm.
        val raw: String = aaaa.getStr(path + file.getName)
        val json: JSONObject = JSONUtil.parseObj(raw)
        // NOTE(review): throws NPE if "cxjgxx" is absent — confirm every input file has it.
        val cxjgxx: String = json.get("cxjgxx").toString
        // Dataset[String] overload: the RDD-based json(...) is deprecated since Spark 2.2.
        val frame: DataFrame = spark.read.json(spark.createDataset(Seq(cxjgxx)))
        println("----------------------------------------------------------")
        frame.show()
      }
    } finally {
      spark.stop()
    }
  }
}
