package com.lvmama.rhino.analyze.processing

import com.lvmama.rhino.common.utils.Utils
import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{col, count, lit}
import com.lvmama.rhino.common.utils.JDBCUtil.Conversions.ImplicitInsert

/**
  * Created by Administrator on 2017/7/28.
  */
object PerformanceMonitorV2History {

  /**
    * Aggregates `pageForward` records per (userId, mobileNumber, deviceName,
    * deviceToken) combination, stamps each aggregate with yesterday's date and
    * inserts the result into the `monitor_api_history` MySQL table.
    *
    * @param sc          Spark context (not used by this method; kept for
    *                    interface compatibility with callers)
    * @param pageForward input DataFrame — assumed to contain the columns
    *                    userId, mobileNumber, deviceName and deviceToken
    */
  def process(sc: SparkContext, pageForward: DataFrame): Unit = {
    // The same key columns drive both the projection and the grouping.
    val keyColumns = Seq(col("userId"), col("mobileNumber"), col("deviceName"), col("deviceToken"))

    // Count how many rows exist per distinct key combination.
    val aggregated = pageForward
      .select(keyColumns: _*)
      .groupBy(keyColumns: _*)
      .agg(count("*").as("amount"))

    // Map camelCase column names onto the snake_case schema of the target
    // table, add yesterday's date as create_date, and write to MySQL.
    val reportDate = Utils.getYesterday()
    aggregated
      .select(
        col("userId").as("user_id"),
        col("mobileNumber").as("mobile_number"),
        col("deviceName").as("device_name"),
        col("deviceToken").as("device_token"),
        col("amount"),
        lit(reportDate).as("create_date"))
      .insertDF2MysqlToWolverine("monitor_api_history")
  }
}
