package com.lvmama.rhino.analyze.processing

import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions._
import com.lvmama.rhino.common.utils.Utils

/**
 * System monitoring: cleansing of API failure records.
 * Filters failed requests, aggregates failure counts per dimension, and
 * writes the daily result to the `monitor_api_failure` MySQL table.
 */
object PerformanceMonitorFailure {

  /**
   * Filters failed requests out of the page-forward data, counts them per
   * dimension combination, and inserts the aggregate into MySQL stamped
   * with yesterday's date.
   *
   * @param sc          Spark context (unused here; kept for interface compatibility with callers)
   * @param pageForward raw request records; must contain the grouping columns and `failType`
   */
  def process(sc: SparkContext, pageForward: DataFrame): Unit = {
    // Dimension columns: every projected column is also part of the aggregation key,
    // so a single Seq serves both the select and the groupBy.
    val groupColumns = Seq("apiName", "apiVersion", "requestType", "operators",
      "networkType", "province", "tec", "errorCode", "failType")

    // Keep only failed requests (failType == "0" marks a successful request).
    val failureRequest = pageForward
      .filter(col("failType") !== "0")
      .select(groupColumns.map(col): _*)

    // Count failures per dimension combination.
    // (failureRequest is already projected to groupColumns, so no second select is needed.)
    val failureResult = failureRequest
      .groupBy(groupColumns.map(col): _*)
      .agg(count("*").as("amount"))

    // Rename to the database column names and insert the daily aggregate.
    // NOTE(review): "failuer_type" is misspelled but must match the existing
    // monitor_api_failure schema — confirm against the DB before renaming.
    val yesterday = Utils.getYesterday()
    failureResult.select(
      col("apiName").as("api_name"),
      col("apiVersion").as("api_version"),
      col("requestType").as("method"),
      col("operators"),
      col("networkType").as("network_type"),
      col("province"),
      col("tec").as("app_type"),
      col("errorCode").as("failure_code"),
      col("failType").as("failuer_type"),
      col("amount"),
      lit(yesterday).as("oper_date")
    ).insertDF2MysqlDirect("monitor_api_failure")
  }
}