package com.etc.sparkstrea

import org.apache.spark.SparkConf
import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Streaming job: every 10 seconds, print the top-3 most-clicked products per
 * category over the last 60 seconds of socket input.
 *
 * Input lines are space-separated; field 1 is read as the product and field 2
 * as the category (assumed format "&lt;user&gt; &lt;product&gt; &lt;category&gt;" — TODO confirm
 * against the producer writing to master:9999).
 */
object Top3HotProduct {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Top3HotProduct").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(1))

    // Build the HiveContext ONCE on the driver. The original created a new
    // HiveContext inside foreachRDD, i.e. on every 10-second batch.
    val hiveContext = new HiveContext(ssc.sparkContext)

    // Schema is constant — build it once instead of once per batch.
    val schema = StructType(Array(
      StructField("category", StringType, true),
      StructField("product", StringType, true),
      StructField("click_count", IntegerType, true)
    ))

    val lines = ssc.socketTextStream("master", 9999)

    // Key each click as "<category>_<product>"; count over a 60s window
    // sliding every 10s. Split each line once, and drop malformed lines
    // (fewer than 3 fields) instead of crashing the whole job.
    val categoryProductCounts = lines
      .map(_.split(" "))
      .filter(_.length >= 3)
      .map(fields => (fields(2) + "_" + fields(1), 1))
      .reduceByKeyAndWindow(
        (v1: Int, v2: Int) => v1 + v2, Seconds(60), Seconds(10)
      )

    categoryProductCounts.foreachRDD(rdd => {
      val rows = rdd.map(pair => {
        // limit=2 keeps any underscores in the product name intact
        val parts = pair._1.split("_", 2)
        Row(parts(0), parts(1), pair._2)
      })
      val frame = hiveContext.createDataFrame(rows, schema)
      // BUGFIX: must be a *local* temp view. The original used
      // createOrReplaceGlobalTempView, which registers the view as
      // "global_temp.user", so the query below ("from user") could not
      // resolve the table and failed at runtime.
      frame.createOrReplaceTempView("user")
      hiveContext.sql(
        """select category, product, click_count
          |from (
          |  select category, product, click_count,
          |         row_number() over (partition by category order by click_count desc) as rank
          |  from user
          |) tmp
          |where rank <= 3""".stripMargin
      ).show()
    })

    ssc.start()
    ssc.awaitTermination()
  }
}
