package cn.spark.study.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.sql.hive.HiveContext

object Top3HotProduct {

  /**
   * Spark Streaming job: reads click-log lines from a socket ("spark1", port 9999),
   * counts clicks per "category_product" key over a 60-second window sliding every
   * 10 seconds, and prints the top-3 products of each category using a Spark SQL
   * window function (row_number).
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("Top3HotProduct")
      .setMaster("local[2]")

    val ssc = new StreamingContext(conf, Seconds(2))

    // Create the HiveContext ONCE on the driver. The original rebuilt it inside
    // every foreachRDD invocation (i.e. once per 10-second batch), which is
    // expensive; foreachRDD closures execute on the driver, so a single
    // instance can safely be reused across batches.
    val hiveContext = new HiveContext(ssc.sparkContext)

    // Schema of the rows registered for SQL: (category, product, count).
    // Hoisted out of foreachRDD — it never changes between batches.
    val schema = StructType(Array(
      StructField("category", StringType, true),
      StructField("product", StringType, true),
      StructField("count", IntegerType, true)))

    // Click log line, e.g. "leo iphone phone".
    // NOTE(review): the key below uses token(1) as the category part and
    // token(0) as the product part, which conflicts with the 3-field sample
    // above (user product category) — confirm the real log format before
    // changing these indices.
    ssc.socketTextStream("spark1", 9999)
      .map { line =>
        // Split once per record instead of twice.
        val tokens = line.split(" ")
        (tokens(1) + "_" + tokens(0), 1)
      }
      .reduceByKeyAndWindow(
        (v1: Int, v2: Int) => v1 + v2,
        Seconds(60),  // window length
        Seconds(10))  // slide interval
      .foreachRDD(windowedCounts => {
        // Explode each "category_product" key back into separate columns.
        val rowRDD = windowedCounts.map { case (key, count) =>
          val parts = key.split("_")
          Row(parts(0), parts(1), count)
        }

        hiveContext.createDataFrame(rowRDD, schema)
          .registerTempTable("category_product_count")

        // Top 3 products per category by click count in the current window.
        val top3Sql =
          "select category,product,count from ( " +
            "select category,product,count," +
            "row_number() over(partition by category order by count desc) rn " +
            "from category_product_count ) tmp " +
            "where rn <= 3"

        hiveContext.sql(top3Sql).show()
      })

    ssc.start()
    ssc.awaitTermination()
  }

}