package aldwxstat

import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.functions.countDistinct
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.storage.StorageLevel

import aldwxutils.{AldwxDebug, DBConf, TimeUtil}

/**
  * Created by ald on 2017/08/02.
  *
  * QR-code-group statistics: for a given day, computes per QR-code group
  * the number of distinct scanning visitors, the number of scan events,
  * and the number of new users brought in by scans — both per hour and
  * per day — and (when the writers are enabled) upserts the results into
  * MySQL.
  */
object QrcodeGroupHourly {

  /**
    * Entry point. Builds the hourly and daily QR-group metric tables for
    * the day named in `args` and prints them with `show()`.
    *
    * NOTE(review): this version reads from a hard-coded local debug path
    * ("d:/test/20171110akjson/") with `master("local[*]")`, and the MySQL
    * writers at the bottom are commented out — this looks like a debug
    * build; confirm before deploying. The production parquet path is kept
    * in a comment below.
    */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.WARN)
    AldwxDebug.debug_info("2017-11-06", "sunxiaowei")

    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .config("spark.sql.shuffle.partitions", 12)
      .getOrCreate()

    // QR metadata. ald_code.qr_group_key is written once when the code is
    // created, so joining the ald_qr_group table is unnecessary.
    val qr_info_df = DBConf.read_from_mysql(spark, "(select app_key,qr_key,qr_group_key from ald_code) as code_df")
    qr_info_df.show()

    // Day to process, parsed from the CLI arguments.
    val the_day = TimeUtil.processArgs(args)
    println(the_day)

    // Debug input; the production source is:
    //   spark.read.parquet(s"${DBConf.hdfsUrl}/${the_day}/*/part-*").repartition(120).cache()
    val df: DataFrame = spark.read.json("d:/test/20171110akjson/")

    // Skip the whole computation when the day reported no QR data.
    // BUGFIX: the previous early `return` skipped spark.stop() and leaked
    // the session; now stop() runs on both paths.
    if (df.columns.contains("ag_aldsrc")) {

      // Page views that originated from a QR scan.
      val page_df = df.filter("ev='page' and ag_aldsrc !='null' and qr !='null'")
        .select(
          df("ak").alias("app_key"),
          df("qr").alias("qr_key"),
          df("hour"),
          df("at"),
          df("uu")
        ).cache()

      // Hourly scan visitors / scan events per group.
      // BUGFIX: the aggregation previously emitted the raw `at` column
      // (aliased "dd", which is invalid inside .agg) instead of
      // countDistinct("at") as "qr_scan_count"; the hourly result join
      // below selects "qr_scan_count" and would have failed at analysis
      // time. Now mirrors the daily aggregation.
      val grouped_df = qr_info_df.join(
          page_df,
          qr_info_df("app_key") === page_df("app_key") &&
          qr_info_df("qr_key") === page_df("qr_key")
        ).select(
          qr_info_df("app_key"),
          qr_info_df("qr_group_key"),
          page_df("hour"),
          page_df("at"),
          page_df("uu")
        ).groupBy("app_key", "qr_group_key", "hour")
        .agg(
          countDistinct("uu") as "qr_visitor_count",
          countDistinct("at") as "qr_scan_count"
        )

      grouped_df.show()

      // First-time visitors (app event carrying ifo='true').
      val ifo_df = df.filter("ev='app' and ifo='true'")
        .select(
          df("ak").alias("app_key"),
          df("hour"),
          df("at"),
          df("uu")
        ).cache()

      // New visitors per hour that arrived through a QR scan.
      val qr_newer_df = page_df.join(
          ifo_df,
          page_df("app_key") === ifo_df("app_key") &&
          page_df("hour") === ifo_df("hour") &&
          page_df("at") === ifo_df("at") &&
          page_df("uu") === ifo_df("uu")
        ).select(
          page_df("app_key"),
          page_df("qr_key"),
          page_df("hour"),
          page_df("at"),
          ifo_df("uu")
        )

      // Hourly new users per QR group (left outer: keep scans whose
      // qr_key has no group metadata).
      val qr_group_new_df = qr_newer_df.join(
          qr_info_df,
          qr_info_df("app_key") === qr_newer_df("app_key") &&
          qr_info_df("qr_key") === qr_newer_df("qr_key"),
          "leftouter"
        ).select(
          qr_newer_df("app_key"),
          qr_newer_df("hour"),
          qr_newer_df("uu"),
          qr_info_df("qr_group_key")
        ).groupBy("app_key", "hour", "qr_group_key")
        .agg(countDistinct("uu").alias("qr_newer_count"))

      qr_group_new_df.show()

      // Hourly result table: (app_key, hour, qr_group_key,
      // qr_visitor_count, qr_scan_count, qr_newer_count).
      val _result_df = grouped_df.join(
          qr_group_new_df,
          grouped_df("app_key") === qr_group_new_df("app_key") &&
          grouped_df("qr_group_key") === qr_group_new_df("qr_group_key") &&
          grouped_df("hour") === qr_group_new_df("hour")
        ).select(
          grouped_df("app_key"),
          grouped_df("hour"),
          grouped_df("qr_group_key"),
          grouped_df("qr_visitor_count"),
          grouped_df("qr_scan_count"),
          qr_group_new_df("qr_newer_count")
        ).na.fill(0)

      _result_df.show()

      /*
       * Daily per-group metrics: scan visitors, scan events, new users.
       */

      // Daily scan visitors / scan events per group.
      val daily_grouped_df = qr_info_df.join(
          page_df,
          qr_info_df("app_key") === page_df("app_key") &&
          qr_info_df("qr_key") === page_df("qr_key")
        ).select(
          qr_info_df("app_key"),
          qr_info_df("qr_group_key"),
          page_df("at"),
          page_df("uu")
        ).groupBy("app_key", "qr_group_key")
        .agg(
          countDistinct("uu") as "qr_visitor_count",
          countDistinct("at") as "qr_scan_count"
        )

      daily_grouped_df.show()

      // Daily new visitors that arrived through a QR scan (no hour key).
      val qr_daily_newer_df = page_df.join(
          ifo_df,
          page_df("app_key") === ifo_df("app_key") &&
          page_df("at") === ifo_df("at") &&
          page_df("uu") === ifo_df("uu")
        ).select(
          page_df("app_key"),
          page_df("qr_key"),
          page_df("at"),
          ifo_df("uu")
        )

      // Daily new users per QR group.
      val qr_group_daily_new_df = qr_daily_newer_df.join(
          qr_info_df,
          qr_info_df("app_key") === qr_daily_newer_df("app_key") &&
          qr_info_df("qr_key") === qr_daily_newer_df("qr_key"),
          "leftouter"
        ).select(
          qr_daily_newer_df("app_key"),
          qr_daily_newer_df("uu"),
          qr_info_df("qr_group_key")
        ).groupBy("app_key", "qr_group_key")
        .agg(countDistinct("uu").alias("qr_newer_count"))

      qr_group_daily_new_df.show()

      // Daily result table: (app_key, qr_group_key, qr_visitor_count,
      // qr_scan_count, qr_newer_count).
      val _daily_result_df = daily_grouped_df.join(
          qr_group_daily_new_df,
          daily_grouped_df("app_key") === qr_group_daily_new_df("app_key") &&
          daily_grouped_df("qr_group_key") === qr_group_daily_new_df("qr_group_key")
        ).select(
          daily_grouped_df("app_key"),
          daily_grouped_df("qr_group_key"),
          daily_grouped_df("qr_visitor_count"),
          daily_grouped_df("qr_scan_count"),
          qr_group_daily_new_df("qr_newer_count")
        ).na.fill(0)

      _daily_result_df.show()

      // MySQL writers are disabled in this debug build:
      // qrcodeHourlyGroupForeachPartition(the_day, _result_df)
      // qrcodeDailyGroupForeachPartition(the_day, _daily_result_df)

      // Release the cached intermediates before shutting down.
      page_df.unpersist()
      ifo_df.unpersist()
    }

    spark.stop()
  }

  /**
    * Upserts the hourly per-group metrics into MySQL table
    * `aldstat_hourly_qr_group`, one JDBC batch per partition.
    *
    * Fixes over the previous version:
    *  - uses a PreparedStatement with bound parameters instead of
    *    string-interpolated SQL (escaping/injection safety);
    *  - closes the statement and connection in a `finally` block, so they
    *    are released on success too (previously only closed on error).
    *
    * @param day                 day the metrics belong to (stored in `day`)
    * @param qrcodeHourlyGroupDf rows of (app_key, hour, qr_group_key,
    *                            qr_visitor_count, qr_scan_count, qr_newer_count)
    */
  private def qrcodeHourlyGroupForeachPartition(day: String, qrcodeHourlyGroupDf: DataFrame) = {
    val url       = DBConf.url
    val username  = DBConf.user
    val password  = DBConf.password
    val driver    = "com.mysql.jdbc.Driver"
    val update_at = TimeUtil.ytoday()
    val the_day   = day

    // VALUES(col) re-uses the inserted value on the update path, which is
    // exactly what the old interpolated statement did.
    val sql =
      "insert into aldstat_hourly_qr_group " +
        "(app_key,day,hour,qr_group_key,qr_visitor_count,qr_scan_count,qr_newer_count,update_at) " +
        "values (?,?,?,?,?,?,?,?) " +
        "ON DUPLICATE KEY UPDATE " +
        "qr_visitor_count=VALUES(qr_visitor_count), " +
        "qr_scan_count=VALUES(qr_scan_count), " +
        "qr_newer_count=VALUES(qr_newer_count), " +
        "update_at=VALUES(update_at)"

    // Batch-insert row by row, one connection per partition.
    qrcodeHourlyGroupDf.foreachPartition((rows: Iterator[Row]) => {
      var connection: Connection = null
      var statement: PreparedStatement = null
      try {
        Class.forName(driver)
        connection = DriverManager.getConnection(url, username, password)
        connection.setAutoCommit(false)
        statement = connection.prepareStatement(sql)

        rows.foreach { r =>
          // Row layout matches the SELECT that built the result DataFrame.
          statement.setString(1, String.valueOf(r(0))) // app_key
          statement.setString(2, the_day)              // day
          statement.setString(3, String.valueOf(r(1))) // hour
          statement.setString(4, String.valueOf(r(2))) // qr_group_key
          statement.setString(5, String.valueOf(r(3))) // qr_visitor_count
          statement.setString(6, String.valueOf(r(4))) // qr_scan_count
          statement.setString(7, String.valueOf(r(5))) // qr_newer_count
          statement.setString(8, update_at)
          statement.addBatch()
        }
        statement.executeBatch()
        connection.commit()
      } catch {
        // Best-effort: log and let the remaining partitions proceed,
        // matching the previous behavior.
        case e: Exception => e.printStackTrace()
      } finally {
        if (statement != null) statement.close()
        if (connection != null) connection.close()
      }
    })
  }

  /**
    * Upserts the daily per-group metrics into MySQL table
    * `aldstat_daily_qr_group`, one JDBC batch per partition.
    *
    * Same fixes as the hourly writer: bound parameters instead of string
    * interpolation, and `finally`-based resource cleanup.
    *
    * @param day                day the metrics belong to (stored in `day`)
    * @param qrcodeDailyGroupDf rows of (app_key, qr_group_key,
    *                           qr_visitor_count, qr_scan_count, qr_newer_count)
    */
  private def qrcodeDailyGroupForeachPartition(day: String, qrcodeDailyGroupDf: DataFrame) = {
    val url       = DBConf.url
    val username  = DBConf.user
    val password  = DBConf.password
    val driver    = "com.mysql.jdbc.Driver"
    val update_at = TimeUtil.ytoday()
    val the_day   = day

    val sql =
      "insert into aldstat_daily_qr_group " +
        "(app_key,day,qr_group_key,qr_visitor_count,qr_scan_count,qr_newer_count,update_at) " +
        "values (?,?,?,?,?,?,?) " +
        "ON DUPLICATE KEY UPDATE " +
        "qr_visitor_count=VALUES(qr_visitor_count), " +
        "qr_scan_count=VALUES(qr_scan_count), " +
        "qr_newer_count=VALUES(qr_newer_count), " +
        "update_at=VALUES(update_at)"

    // Batch-insert row by row, one connection per partition.
    qrcodeDailyGroupDf.foreachPartition((rows: Iterator[Row]) => {
      var connection: Connection = null
      var statement: PreparedStatement = null
      try {
        Class.forName(driver)
        connection = DriverManager.getConnection(url, username, password)
        connection.setAutoCommit(false)
        statement = connection.prepareStatement(sql)

        rows.foreach { r =>
          // Row layout matches the SELECT that built the result DataFrame.
          statement.setString(1, String.valueOf(r(0))) // app_key
          statement.setString(2, the_day)              // day
          statement.setString(3, String.valueOf(r(1))) // qr_group_key
          statement.setString(4, String.valueOf(r(2))) // qr_visitor_count
          statement.setString(5, String.valueOf(r(3))) // qr_scan_count
          statement.setString(6, String.valueOf(r(4))) // qr_newer_count
          statement.setString(7, update_at)
          statement.addBatch()
        }
        statement.executeBatch()
        connection.commit()
      } catch {
        // Best-effort: log and let the remaining partitions proceed,
        // matching the previous behavior.
        case e: Exception => e.printStackTrace()
      } finally {
        if (statement != null) statement.close()
        if (connection != null) connection.close()
      }
    })
  }
}


















