package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitInsert
import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
import com.lvmama.rhino.common.utils.Utils

/**
 * System monitoring: cleans per-request API metrics and aggregates them by region.
 * The aggregated rows are written to the `monitor_region` MySQL table.
 */
object PerformanceMonitorRegion {

  /**
   * Filters successful requests, aggregates timing/speed metrics per
   * (province, operators, networkType, tec), and inserts the result into MySQL.
   *
   * @param sc          Spark context (unused here, kept for interface compatibility)
   * @param pageForward raw request DataFrame; must contain the columns selected below
   */
  def process(sc : SparkContext, pageForward : DataFrame) : Unit = {
    // Keep only successful requests (failType == "0").
    val succeeded = pageForward.filter(col("failType") === "0")

    // Project the columns we need, then aggregate metrics per region/operator/network/tech.
    val aggregated = succeeded
      .select(
        col("province"), col("operators"), col("networkType"), col("tec"),
        col("networkTime"), col("transferSpeed"), col("networkSpeed"),
        col("totalTime"), col("transferTime"), col("firstTime"), col("deviceToken"))
      .groupBy(col("province"), col("operators"), col("networkType"), col("tec"))
      .agg(
        sum(col("networkTime")).as("networkTime"),
        sum(col("transferSpeed")).as("transferSpeed"),
        sum(col("networkSpeed")).as("networkSpeed"),
        sum(col("totalTime")).as("totalTime"),
        sum(col("transferTime")).as("transferTime"),
        sum(col("firstTime")).as("firstTime"),
        countDistinct(col("deviceToken")).as("deviceNum"),
        count("*").as("amount"))

    // Rename to the MySQL schema's snake_case column names, stamp yesterday's
    // date as the operation date, and insert directly into `monitor_region`.
    val operDate = Utils.getYesterday()
    aggregated
      .select(
        col("province"),
        col("operators"),
        col("networkType").as("network_type"),
        col("tec").as("app_type"),
        col("networkTime").as("network_time"),
        col("transferSpeed").as("transfer_speed"),
        col("networkSpeed").as("network_speed"),
        col("totalTime").as("api_time"),
        col("deviceNum").as("device_num"),
        col("transferTime").as("transfer_time"),
        col("firstTime").as("first_package_time"),
        col("amount"),
        lit(operDate).as("oper_date"))
      .insertDF2MysqlDirect("monitor_region")
  }
}