package com.lvmama.rhino.analyze.nginx

import com.lvmama.rhino.common.entity.JobConfig
import com.lvmama.rhino.common.utils.JDBCUtil.{ConnectionPool, JDBCTemplate}
import com.lvmama.rhino.common.utils.Utils
import com.lvmama.rhino.common.utils.spark.SparkApplication
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.SQLContext

/**
  * Created by yuanxiaofeng on 2016/7/21.
  *
  * 码上游下单页面PV，UV统计
  */
class MsyPageStat(conf: JobConfig, path: String) extends SparkApplication{

  override var appName: String = "MsyPageStatJob"
  override var sparkConfig: Map[String, String] = conf.spark

  /**
    * 码上游 (Masahngyou) order-page PV/UV statistics job.
    *
    * Reads yesterday's nginx access log partition from HDFS, computes PV/UV
    * per channel (APP online, APP offline, WAP) and persists one row per
    * channel into the `msy_stat` MySQL table.
    */
  def execute(): Unit = {
    sparkConfig += ("spark.app.name" -> appName)
    sparkConfig += ("spark.master" -> "local[2]")

    withSparkContext { sc =>
      val sqlCtx = new SQLContext(sc)
      import sqlCtx.implicits._

      // Renamed from `conf`: the original shadowed the constructor's JobConfig.
      val hadoopConf = new Configuration()
      val hdfs = FileSystem.get(hadoopConf)
      // Check the daily partition we actually read, not just the root path.
      val inputPath = path + Utils.getYesterday("yyyy/MM/dd")
      if (!hdfs.exists(new Path(inputPath))) {
        // Original bug: it called sc.stop() but then fell through and still
        // invoked sc.textFile on the stopped context. Guarding with if/else
        // makes the early exit actually skip the processing.
        sc.stop()
      } else {
        val msyLog = sc.textFile(inputPath).map(l => Utils.initMsy(l)).toDS()

        // Filter each channel once and reuse, instead of re-filtering per metric.
        // 在线 APP (online: has a session id)
        val appOn = msyLog.filter(m => "APP".equals(m.first_channel) && m.lvsessionId.nonEmpty)
        // 离线 APP (offline: no session id)
        val appOff = msyLog.filter(m => "APP".equals(m.first_channel) && m.lvsessionId.isEmpty)
        // WAP traffic; UV is keyed on session id rather than device id (udid).
        val wap = msyLog.filter(m => "WAP".equals(m.first_channel))

        val appOnPv = appOn.count()
        val appOnUv = appOn.map(m => m.udid).distinct.count()
        val appOffPv = appOff.count()
        val appOffUv = appOff.map(m => m.udid).distinct.count()
        val wapPv = wap.count()
        val wapUv = wap.map(m => m.lvsessionId).distinct.count()

        val conn = ConnectionPool.getMysqlManager.getConnection
        // Parameterized insert. The original interpolated values into the SQL
        // string and left APP/ON/OFF/WAP unquoted (invalid SQL), and the WAP
        // row had an empty off_on slot — every insert failed and the failure
        // was swallowed by the catch below.
        val ps = conn.prepareStatement(
          "insert into msy_stat(pv, uv, first_channel, off_on) values (?, ?, ?, ?)")
        try {
          // One row per channel; offOn is null for WAP (no on/off distinction).
          def insert(pv: Long, uv: Long, channel: String, offOn: String): Unit = {
            ps.setLong(1, pv)
            ps.setLong(2, uv)
            ps.setString(3, channel)
            ps.setString(4, offOn)
            ps.executeUpdate()
          }
          insert(appOnPv, appOnUv, "APP", "ON")
          insert(appOffPv, appOffUv, "APP", "OFF")
          insert(wapPv, wapUv, "WAP", null)
        } catch {
          // Best-effort persistence, matching the original behavior: log and
          // continue rather than failing the job.
          case e: Exception => e.printStackTrace()
        } finally {
          if (ps != null)
            ps.close()
          if (conn != null)
            conn.close()
        }
      }
    }
  }
}

/** Companion entry point: builds the job from the shared config and runs it. */
object MsyPageStat {
  val config = JobConfig()

  def apply(path: String): Unit = {
    val job = new MsyPageStat(config, path)
    job.execute()
  }
}
