package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.utils.Utils
import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitInsert

/**
  * Created by Administrator on 2017/7/28.
  */
/**
  * Aggregates per-region API performance metrics from page-forward events and
  * persists the result to the `monitor_api_region` MySQL table (Wolverine DB).
  */
object PerformanceMonitorV2Region {

  /**
    * Computes total network time and request count per
    * (province, city, operators, networkType, tec) group for successful
    * requests, stamps the rows with yesterday's date, and inserts them
    * into `monitor_api_region`.
    *
    * @param sc          Spark context; currently unused, kept for interface
    *                    compatibility with sibling processors. NOTE(review):
    *                    confirm callers before removing.
    * @param pageForward source DataFrame; expected columns include errorCode,
    *                    province, city, operators, networkType, tec,
    *                    networkTime — TODO confirm schema against producer.
    */
  def process(sc : SparkContext, pageForward : DataFrame) : Unit = {
    // Keep only successful requests: errorCode "1" (app convention) or "200" (HTTP OK).
    val successRequest = pageForward.filter(col("errorCode").equalTo("1") || col("errorCode").equalTo("200"))

    // Aggregate by region/network/app dimensions.
    // Note: the original projection also carried deviceToken, but it was never
    // used by groupBy/agg — dropped here to narrow the shuffle.
    val result = successRequest
      .select(col("province"), col("city"), col("operators"), col("networkType"), col("tec"), col("networkTime"))
      .groupBy(col("province"), col("city"), col("operators"), col("networkType"), col("tec"))
      .agg(sum(col("networkTime")).as("networkTime"), count("*").as("amount"))

    // Stamp rows with yesterday's date and write to MySQL, renaming columns
    // to match the monitor_api_region table schema.
    val yesterday = Utils.getYesterday()
    result.select(
        col("province"),
        col("city"),
        col("operators"),
        col("networkType").as("network_type"),
        col("tec").as("app_type"),
        col("networkTime").as("network_time"),
        col("amount"),
        lit(yesterday).as("create_date"))
      .insertDF2MysqlToWolverine("monitor_api_region")
  }
}
