package com.lvmama.rhino.streaming

import kafka.serializer.StringDecoder
//import net.sf.json.JSONObject
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.SparkConf


/**
  * Created by yuanxiaofeng on 2016/5/15.
  */
object SearchSuggest {

  private val AppName = "SearchSuggestStreaming"

  /**
    * Wires up a Spark StreamingContext (10-second batch interval) for the
    * search-suggest job.
    *
    * NOTE(review): every Kafka-consumption / aggregation step below is
    * commented out and `ssc.start()` is never called, so invoking this method
    * currently only constructs the context and returns — no streaming happens.
    *
    * @param master optional Spark master URL; when absent, the master already
    *               configured on the SparkConf (e.g. via spark-submit) is used
    * @param args   command-line arguments (currently unused by the live code)
    * @param jars   jars to ship to the executors
    */
  def execute(master: Option[String], args: List[String], jars: Seq[String] = Nil ): Unit ={
    val conf = new SparkConf().setAppName(AppName).setJars(jars)
    // Only override the master when one was explicitly supplied.
    master.foreach(conf.setMaster)
    val ssc = new StreamingContext(conf, Seconds(10))

    // Kafka source configuration (consumed only by the commented-out pipeline below).
    val topic = Set("search_events")
    val brokers = "10.200.2.104:9092,10.200.2.113:9092,10.200.2.114:9092"
    val kafkaParams = Map[String, String](
      "metadata.broker.list" -> brokers, "serializer.class" -> "kafka.serializer.StringEncoder")
    val dbIndex = 1
//    val clickHashKey = "app::users::click"

//    val kafkaStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topic)

//    val events = kafkaStream.flatMap(line => {
//      val data = JSONObject.fromObject(line._2)
//      println(data)
//      Some(data)
//    })

//    val search = events.map(x => (x.getString(""), x.getInt(""))).reduceByKey(_ + _)
//    search.foreachRDD(rdd => {
//      rdd.foreachPartition(partitionOfRecords => {
//        partitionOfRecords.foreach(pair => {
//
//        })
//      })
//    })

//    val search = events.map(x => (x.getString("key"), x.getString("click_count"))).reduceByKey(_ + _)
//    search.repartition(1).foreachRDD(rdd => {
//      rdd.map(r => r._1 + "," + r._2).saveAsTextFile("hdfs://Master.Hadoop:8020/stat_data/client-side/search_suggest/2016/05/15/")
//    })

//    val search = events.map(x => (x.getString("key"), x.getInt("click_count")))
//      .reduceByKeyAndWindow((a : Int, b: Int) => a + b, Seconds(60), Seconds(10))
//    search.repartition(1).foreachRDD(rdd => rdd.collect())
//    ssc.start()
//    ssc.awaitTermination()
  }
}
