package main.java.top_ip


import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}


/**
  * TopIpDemo
  *
  * @author zhangyimin
  *  2018-11-14 上午11:07
  * @version 1.0
  */
/**
  * Streaming "top IP" demo.
  *
  * Consumes comma-separated click-log lines from the Kafka topic `top_ip`
  * (via the legacy receiver-based Zookeeper API), parses them into
  * [[userClickInfo]] rows in 3-second micro-batches, and prints per-IP hit
  * counts ordered by count descending.
  */
object TopIpDemo {

  /**
    * Entry point. Blocks forever on `awaitTermination()`.
    *
    * Expected record layout (exactly 6 comma-separated fields), e.g.:
    * {{{ 1,201.105.101.102,http://mystore.jsp/?productid=4,2017020021,3,1 }}}
    *
    * @param args unused
    */
  def main(args: Array[String]): Unit = {
    // Silence noisy framework loggers so only application output is visible.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    val sparkSession = SparkSession.builder()
      .appName("top_ip")
      .master("local[2]")
      .getOrCreate()
    val streamingContext =
      new StreamingContext(sparkSession.sparkContext, batchDuration = Seconds(3))

    // Receiver-based Kafka stream (old API): Zookeeper quorum, consumer group,
    // topic -> receiver-thread count, and storage level for received blocks.
    val kafkaStream = KafkaUtils.createStream(
      streamingContext,
      "10.16.7.36:2181",
      "top_ip",
      Map("top_ip" -> 1),
      StorageLevel.MEMORY_ONLY_SER_2)

    // Each Kafka message is a (key, message) pair; the log line is the message.
    val logLines = kafkaStream.map(_._2)

    // Use the SparkSession's implicits (toDF, etc.); the SQLContext handle is legacy.
    import sparkSession.implicits._

    // foreachRDD is an action returning Unit, so its result is deliberately not bound.
    logLines.foreachRDD { rdd =>
      // Split each line once, keep only well-formed records (exactly 6 fields),
      // then map the fields onto the case class so toDF() names the columns.
      val clickDf = rdd
        .map(_.split(","))
        .filter(_.length == 6)
        .map(f => userClickInfo(f(0), f(1), f(2), f(3), f(4), f(5)))
        .toDF()
      clickDf.createOrReplaceTempView("user_click_info")
      // Order by hit count descending so show() (top 20 rows) actually
      // displays the *top* IPs rather than 20 arbitrary groups.
      sparkSession
        .sql("select ip, count(ip) ip_count from user_click_info group by ip order by ip_count desc")
        .show()
    }

    streamingContext.start()
    streamingContext.awaitTermination()
  }

  /**
    * One parsed click-log record; field names become DataFrame column names.
    * NOTE(review): field meanings are inferred from names only — confirm
    * against the producer. (Name kept lowercase for source compatibility,
    * though UpperCamelCase is the Scala convention.)
    */
  case class userClickInfo(user_id: String, ip: String, url: String, date: String, action: String, area_id: String)

}
