package cn.doit.sparkcore2json

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

/**
  * Created by ZHAOXUHUA 
  * .           on 2018/11/28.
  */
object Sparkcore2Json2 {
  /**
    * Counts occurrences of the (column 24, column 25) value pair in the
    * parquet input, sorts the pairs by count (ascending), and writes each
    * result as one single-entry JSON object per output line.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = new SparkContext(conf)
    // A SQLContext instance is required to read/operate on DataFrames.
    val sqlContext = new SQLContext(sc)
    val file1 = sqlContext.read.parquet("D:/学习专用/项目/项目四资料/data/dophinData")
    // Spark-core aggregation: ((col24, col25)) -> occurrence count.
    // Going through .rdd makes the RDD transformation chain explicit and
    // works on both the Spark 1.x and 2.x DataFrame APIs.
    val counts = file1.rdd
      .map(row => ((row.get(24), row.get(25)), 1))
      .reduceByKey(_ + _)
      .sortBy(_._2)
    // Serialize each (key, count) pair into its OWN JSONObject.
    // BUG FIX: the original code mutated a single driver-side JSONObject
    // inside the distributed map closure; each executor got its own copy
    // and fluentPut accumulated per-partition state, so every saved line
    // carried the cumulative partition-local object instead of one entry.
    val jsonLines = counts.map { case (key, cnt) =>
      new JSONObject().fluentPut(key.toString, cnt).toString
    }
    jsonLines.saveAsTextFile("dolphinjs.data")
    sc.stop()
  }
}