package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.entity.{H5, NATIVE}
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitInsert
import com.lvmama.rhino.common.utils.Utils
import org.apache.spark.SparkContext
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.sql.functions._
/**
 * System monitoring: cleans one day's raw API request events and writes one
 * aggregated row per (api, version, method, operator, tech, network)
 * combination to the `monitor_api` MySQL table.
 */
object PerformanceMonitorApi {

  // Metric + dimension columns read from the raw event DataFrame.
  private val metricColumns = Seq("apiName", "apiVersion", "requestType", "operators",
    "firstTime", "transferTime", "dataSize", "tec", "networkType", "apiTime")

  // Dimensions identifying one API series; a synthetic concatCol built from
  // these is used as the single join key between success and failure sides.
  private val dimensionColumns = Seq("apiName", "apiVersion", "requestType",
    "operators", "tec", "networkType")

  /**
   * Aggregates API monitoring events and inserts the result into MySQL.
   *
   * @param sc          active SparkContext (retained for interface compatibility;
   *                    the small column lists no longer need broadcasting — they
   *                    are only read on the driver during plan construction)
   * @param pageForward raw events; must contain `failType` ("0" = success) plus
   *                    the metric/dimension columns listed above
   */
  def process(sc : SparkContext,pageForward : DataFrame) : Unit = {
    // Build the composite join key from the dimension columns.
    // concat_ws (not concat) so a null dimension does not null out the whole
    // key, and the separator prevents collisions such as ("a","bc") vs ("ab","c").
    def withKey(df: DataFrame): DataFrame =
      df.select(metricColumns.map(col): _*)
        .withColumn("concatCol",
          concat_ws("\u0001", dimensionColumns.map(col): _*))

    val groupCols = (dimensionColumns :+ "concatCol").map(col)

    // Successful requests: sum the timing/size metrics and count them.
    val successResult = withKey(pageForward.filter(col("failType").equalTo("0")))
      .groupBy(groupCols: _*)
      .agg(
        sum(col("firstTime")).as("firstTime"),
        sum(col("transferTime")).as("transferTime"),
        sum(col("dataSize")).as("dataSize"),
        sum(col("apiTime")).as("apiTime"),
        count("*").as("successAmount"))

    // Failed (non-success) requests: only the failure count is needed.
    val errorResult = withKey(pageForward.filter(!col("failType").equalTo("0")))
      .groupBy(groupCols: _*)
      .agg(count("*").as("failAmount"))

    // Full outer join so series that ONLY failed are kept (a left_outer join
    // from the success side would silently drop APIs with a 100% failure rate).
    val joined = successResult.join(errorResult,
      successResult.col("concatCol") === errorResult.col("concatCol"), "outer")

    // For fail-only rows the success side is entirely null, so take each
    // dimension from whichever side has it.
    val dimCols = dimensionColumns.map(c =>
      coalesce(successResult.col(c), errorResult.col(c)).as(c))
    val metricCols = Seq("firstTime", "transferTime", "dataSize", "apiTime",
      "successAmount").map(successResult.col)
    val result = joined.select(dimCols ++ metricCols :+ errorResult.col("failAmount"): _*)

    // One-sided rows get 0 for the missing counters/metrics; total = success + fail.
    val totalResult = result.na
      .fill(0, Seq("failAmount", "successAmount",
        "firstTime", "transferTime", "dataSize", "apiTime"))
      .withColumn("amount", col("failAmount") + col("successAmount"))

    // Rename to the MySQL schema, stamp yesterday's date, and insert.
    val yesterday =  Utils.getYesterday()
    totalResult.select(
      col("apiName").as("api_name"),
      col("apiVersion").as("api_version"),
      col("requestType").as("method"),
      col("operators"),
      col("networkType").as("network_type"),
      col("tec").as("app_type"),
      col("firstTime").as("first_package_time"),
      col("transferTime").as("transfer_time"),
      col("dataSize").as("package_size"),
      col("failAmount").as("api_fail_amount"),
      col("successAmount").as("api_success_amount"),
      col("apiTime").as("api_time"),
      col("amount"),
      lit(yesterday).as("oper_date"))
      .insertDF2MysqlDirect("monitor_api")
  }

}