package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitInsert
import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._

/**
  * Created by wxlizhi on 2017/3/8.
  */
/**
  * Aggregates H5 page load metrics per URL / carrier / time bucket and writes
  * the result into the `h5_page_monitor_operators` MySQL table (Wolverine).
  */
object H5PageMonitorOperators {

  /**
    * Groups the raw request records by (ext_url, operators, time_interval, oper_date),
    * summing `load_time` and counting rows, then persists the renamed result.
    *
    * @param sc      Spark context (unused here; kept for the job-runner's uniform signature)
    * @param request raw monitoring records; must expose the columns
    *                ext_url, operators, time_interval, load_time, oper_date
    */
  def process(sc: SparkContext, request: DataFrame) : Unit = {
    // Project down to only the fields the aggregation needs, then roll up
    // per URL / carrier / interval / day.
    val aggregated = request
      .select(
        col("ext_url"),
        col("operators"),
        col("time_interval"),
        col("load_time"),
        col("oper_date"))
      .groupBy(
        col("ext_url"),
        col("operators"),
        col("time_interval"),
        col("oper_date"))
      .agg(
        // total load time per group, plus the number of samples in it
        sum(col("load_time")).as("load_time"),
        count("*").as("amount"))

    // Rename to the target table's column names and insert into MySQL.
    aggregated
      .select(
        col("ext_url").as("url"),
        col("operators"),
        col("time_interval"),
        col("load_time"),
        col("amount"),
        col("oper_date").as("create_date"))
      .insertDF2MysqlToWolverine("h5_page_monitor_operators")
  }

}
