package tarns_result.old_version

import common.Tools.get_json_file
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * Converts the line-oriented output produced by the changeCode step into JSON strings.
 *
 * Usage: ToJson [inputPath] [outputPath]
 *   - args(0): optional input path (defaults to the local WorldDevelopmentIndicators part file)
 *   - args(1): optional output path (defaults to the local WorldDevelopmentIndicators_ToJson dir)
 *
 * Each input line is transformed to a JSON string via `common.Tools.get_json_file`
 * and the result is written out as a single text file partition.
 */
object ToJson {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("ToJson").setMaster("local[*]")
    val spark = SparkSession.builder().config(conf).getOrCreate()
    val sc = spark.sparkContext

    // Paths may be supplied on the command line; otherwise fall back to the
    // historical hard-coded defaults so existing invocations keep working.
    val inpath =
      if (args.length > 0) args(0)
      else "file:///Users/kelisiduofu/IdeaProjects/zhiwang_file/jiaoyu/WorldDevelopmentIndicators_1/part-00000"
    val outpath_json =
      if (args.length > 1) args(1)
      else "file:///Users/kelisiduofu/IdeaProjects/zhiwang_file/jiaoyu/WorldDevelopmentIndicators_ToJson"

    // 1. Read the input file as one RDD element per line.
    val lines: RDD[String] = sc.textFile(inpath)

    // 2. Convert every line into a JSON string (project helper) and write the
    //    result to the output path as a single part file.
    //    NOTE(review): saveAsTextFile fails if outpath_json already exists —
    //    callers must supply a fresh directory.
    val json_line_result: RDD[String] = get_json_file(lines)
    json_line_result.repartition(1).saveAsTextFile(outpath_json)

    // Stops the session and its underlying SparkContext.
    spark.stop()
  }
}
