package tarns_result.old_version
import common.Tools.{getHDFSDirFiles, ifDirExistsDelete, jsonFormatEARBOOKTABLEDATA2008_FORMAL_REPORTGROUPER_GUOJI_2019_ZL}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession;
/**
 * Batch job: reads the YEARBOOKTABLEDATA2008_FORMAL_REPORTGROUPER_GUOJI_2019_ZL
 * JSON dump, converts each JSON record to REC format (with Chinese column
 * names) via the shared `common.Tools` helper, and writes the result as a
 * single text file.
 *
 * Usage: optionally pass `inputPath [outputPath]` on the command line;
 * with no arguments the original hard-coded local paths are used, so
 * existing invocations behave exactly as before.
 */
object YEARBOOKTABLEDATA2008_FORMAL_REPORTGROUPER_GUOJI_2019_ZLJsonToREC {

  /** Entry point. Builds a local Spark session, runs the JSON→REC conversion,
    * and always releases the SparkContext, even when the job fails.
    */
  def main(args: Array[String]): Unit = {
    // Input/output locations. Overridable from the command line; the defaults
    // preserve the previous hard-coded local filesystem paths. (An HDFS
    // variant — hdfs://node01:8020/tmp/InternationalData_Result/... — was
    // used previously and can be supplied via args instead.)
    val inputPathSql =
      if (args.length > 0) args(0)
      else "file:///Users/kelisiduofu/IdeaProjects/zhiwang_file/jiaoyu/YEARBOOKTABLEDATA2008_FORMAL_REPORTGROUPER_GUOJI_2019_ZL/part-00000-e8b3cbc5-f468-4833-b9c1-ab5da2c7f1ef-c000.json"
    val outPathResult =
      if (args.length > 1) args(1)
      else "file:///Users/kelisiduofu/IdeaProjects/zhiwang_file/jiaoyu/YEARBOOKTABLEDATA2008_FORMAL_REPORTGROUPER_GUOJI_2019_ZL_RESULT"

    val conf = new SparkConf().setAppName("CSYD_TABLEMETA_WORLD").setMaster("local[*]")
    val spark = SparkSession.builder().config(conf).getOrCreate()
    val sc = spark.sparkContext
    try {
      // NOTE(review): saveAsTextFile fails if the output dir already exists;
      // the original ifDirExistsDelete(outPathResult) call was commented out —
      // confirm whether pre-deleting the output is desired before re-enabling.

      // Step 6: convert each JSON line to REC format (Chinese column names).
      val jsonFile: RDD[String] = sc.textFile(inputPathSql)
      val recLines: RDD[String] = jsonFormatEARBOOKTABLEDATA2008_FORMAL_REPORTGROUPER_GUOJI_2019_ZL(jsonFile)

      // Step 7: coalesce to a single partition so the result is one file.
      recLines.repartition(1).saveAsTextFile(outPathResult)
    } finally {
      // Release the SparkContext even on failure; the original code only
      // called sc.stop() on the success path, leaking the context on error.
      sc.stop()
    }
  }
}

