package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.utils.Utils
import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitInsert

/**
  * Created by Administrator on 2017/7/28.
  */
object PerformanceMonitorV2ApiNS {

  /**
    * Aggregates API network-speed statistics and persists them to MySQL.
    *
    * Rows whose "networkSpeed" equals "0" are treated as failed requests and
    * dropped. The remaining rows are assigned a speed bucket (1-7), counted per
    * bucket, and the per-bucket counts are inserted into the `monitor_api_ns`
    * table tagged with yesterday's date.
    *
    * @param sc          Spark context (not used by this method; kept for interface compatibility)
    * @param pageForward input DataFrame; assumed to contain a "networkSpeed" column — TODO confirm schema with caller
    */
  def process(sc : SparkContext,pageForward : DataFrame) : Unit = {
    val speed = col("networkSpeed")

    // Keep only successful requests (networkSpeed == "0" marks a failure).
    val succeeded = pageForward.filter(!speed.equalTo("0"))

    // Ordered speed buckets: (lower bound, upper bound, bucket id).
    // `between` is inclusive on both ends, so a boundary value falls into the
    // FIRST matching bucket thanks to `when`'s first-match semantics.
    val buckets = Seq(
      (0, 50, 1),
      (50, 250, 2),
      (250, 450, 3),
      (450, 750, 4),
      (750, 1500, 5),
      (1500, 4096, 6)
    )

    // Build the chained when(...).when(...) expression from the bucket table,
    // then route everything above the last upper bound into bucket 7.
    val firstBucket = when(speed.between(buckets.head._1, buckets.head._2), buckets.head._3)
    val speedRange = buckets.tail
      .foldLeft(firstBucket) { case (chain, (lo, hi, id)) => chain.when(speed.between(lo, hi), id) }
      .when(speed.>(4096), 7)

    // Count how many successful requests landed in each bucket.
    val counted = succeeded
      .withColumn("speed_range", speedRange)
      .select(col("speed_range"))
      .groupBy(col("speed_range"))
      .agg(count("*").as("amount"))

    // Persist one row per bucket, stamped with yesterday's date.
    val yesterday = Utils.getYesterday()
    counted
      .select(col("speed_range"), col("amount"), lit(yesterday).as("create_date"))
      .insertDF2MysqlToWolverine("monitor_api_ns")
  }
}
