import com.google.gson.Gson
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Counts occurrences of each (cpid, pzid, sc_code, jg) combination in a
  * file of JSON records, using Spark's reduceByKey.
  *
  * Author: Rabcheng
  * Date: 2019/7/2 23:22
  */
object ReduceByKey {

  // Key-field separator. Bare concatenation (cpid+pzid+sc_code+jg) lets
  // distinct combinations collide — e.g. ("ab","c") and ("a","bc") would
  // produce the same key. \u0001 is very unlikely to appear in field values.
  private val KeySep = "\u0001"

  /**
    * Reads newline-delimited JSON records, deserializes each into a
    * [[KafkaData]], and counts occurrences of every distinct
    * (cpid, pzid, sc_code, jg) combination. Prints the number of distinct
    * combinations and then each (key, count) pair.
    *
    * @param args optional; args(0) overrides the default input path
    *             (backward compatible: no args keeps the original path).
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local[*]")

    val sc = new SparkContext(conf)

    try {
      // Allow the input path to be supplied on the command line; fall back
      // to the original hard-coded location so existing usage still works.
      val inputPath =
        if (args.nonEmpty) args(0)
        else "C:\\Users\\54671\\Desktop\\ldh02.txt"

      val data: RDD[String] = sc.textFile(inputPath)

      // mapPartitions so one Gson instance is built per partition instead of
      // one per record (the original constructed Gson inside map for every line).
      val keyed: RDD[(String, Int)] = data.mapPartitions { lines =>
        val gson = new Gson()
        lines.map { json =>
          val rec = gson.fromJson(json, classOf[KafkaData])
          // Join the four key fields with an explicit separator to avoid
          // the concatenation-collision defect described above.
          val key = Seq(rec.cpid, rec.pzid, rec.sc_code, rec.jg).mkString(KeySep)
          (key, 1)
        }
      }

      val result = keyed.reduceByKey(_ + _)

      // Number of distinct key combinations, then the per-key counts.
      println(result.count())
      result.foreach(println)
    } finally {
      // Always release the SparkContext, even if reading/parsing fails;
      // the original only stopped it on the success path.
      sc.stop()
    }
  }

}


// Shape of one JSON record in the input file. Field names must match the
// JSON keys exactly — Gson binds by reflective field name (see the
// gson.fromJson(json, classOf[KafkaData]) call in ReduceByKey).
// NOTE(review): fields are `var`, presumably so Gson's reflective
// instantiation/population works without extra configuration — confirm
// before tightening to `val`. Only cpid, pzid, sc_code and jg are read
// in this file; the remaining fields are carried but unused here.
case class KafkaData(
                     var is_dup:String,
                     var cpid:String,        // part of the aggregation key
                     var task_id:String,
                     var cp:String,
                     var dt:String,
                     var sc:String,
                     var pzid:String,        // part of the aggregation key
                     var cls_map:String,
                     var task_period:String,
                     var sc_code:String,     // part of the aggregation key
                     var pzname:String,
                     var lang:String,
                     var _SEED_URL:String,
                     var jg:String           // part of the aggregation key
                    )
