package org.spark.api.example

import com.google.common.io.Closeables
import org.apache.commons.codec.digest.DigestUtils
import org.apache.hadoop.hbase.client.Scan
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{DoubleType, LongType, StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SparkSession}
import org.spark.api.config.HBaseInstance
import org.spark.api.constant.{DyColumn, DyTable}
import org.spark.api.util.{HBaseUtil, MsgPackReader, MsgPackWriter}
import org.spark.api.utils.{CsvExportUtil, DateUtils, ResultUtil, SparkContextUtil}
object DetailsAuthorProduct {

  type DetailsAuthorDateProductRDD = RDD[((String, String), (String, String, String, String, String, Double, Double, String, Long, Double))]

  /**
   * Restricts `scan` to the row-key range for one author between two dates.
   *
   * Row keys are MsgPack-encoded as (sha1Hex(authorId), date). The stop row is
   * built from `endDate - 1 day`; HBase's `withStopRow` default is exclusive, so
   * rows keyed on `endDate - 1` itself are excluded — NOTE(review): confirm this
   * off-by-one is intended by the key scheme.
   *
   * @param scan      the scan to mutate (also returned)
   * @param authorId  raw author id; hashed before use in the key
   * @param startDate inclusive start date string
   * @param endDate   end date string (exclusive bound derived from it)
   * @return the same, mutated `scan`
   */
  def buildRowScan(scan: Scan, authorId: String, startDate: String, endDate: String): Scan = {
    val writer = MsgPackWriter.create()
    // close the writer even if hashing/packing throws (original leaked it on error)
    try {
      val shaHexAuthorId = DigestUtils.sha1Hex(authorId)
      val finalEndDate = DateUtils.getBeforeDay(endDate, 1)
      val startRow = writer.pack(shaHexAuthorId).pack(startDate).toByteArray
      val endRow = writer.reset().pack(shaHexAuthorId).pack(finalEndDate).toByteArray
      scan.withStartRow(startRow)
      scan.withStopRow(endRow)
    } finally {
      writer.close()
    }
    scan
  }


  /**
   * Builds the HBase scan for the per-live-room product detail table,
   * selecting all columns needed downstream and restricting the row range
   * to the given author and date window.
   */
  def buildDetailsAuthorDateProductRoomScan(authorId: String, startDate: String, endDate: String): Scan = {
    // Every qualifier this job reads from the room table.
    val qualifiers = Seq(
      DyColumn.DetailsAuthorDateProductRoom.VOLUME,
      DyColumn.DetailsAuthorDateProductRoom.AMOUNT,
      DyColumn.DetailsAuthorDateProductRoom.PRODUCT_TITLE,
      DyColumn.DetailsAuthorDateProductRoom.LABEL,
      DyColumn.DetailsAuthorDateProductRoom.FINAL_PRICE,
      DyColumn.DetailsAuthorDateProductRoom.PLATFORM,
      DyColumn.DetailsAuthorDateProductRoom.RATIO,
      DyColumn.DetailsAuthorDateProductRoom.PRODUCT_COVER,
      DyColumn.DetailsAuthorDateProductRoom.AUTHOR_ID,
      DyColumn.DetailsAuthorDateProductRoom.ROOM_ID,
      DyColumn.DetailsAuthorDateProductRoom.PRODUCT_ID,
      DyColumn.DetailsAuthorDateProductRoom.DATE
    )
    val scan = qualifiers
      .foldLeft(HBaseUtil.buildScan())((s, qualifier) => s.addColumn(DyColumn.FAMILY, qualifier))
      .setCacheBlocks(false)
    // buildRowScan mutates and returns the same Scan instance.
    buildRowScan(scan, authorId, startDate, endDate)
  }


  /**
   * Builds the HBase scan for the per-video (aweme) product detail table,
   * selecting all columns needed downstream and restricting the row range
   * to the given author and date window.
   */
  def buildDetailsAuthorDateProductAwemeScan(authorId: String, startDate: String, endDate: String): Scan = {
    // Every qualifier this job reads from the aweme table.
    val qualifiers = Seq(
      DyColumn.DetailsAuthorDateProductAweme.VOLUME,
      DyColumn.DetailsAuthorDateProductAweme.AMOUNT,
      DyColumn.DetailsAuthorDateProductAweme.PRODUCT_TITLE,
      DyColumn.DetailsAuthorDateProductAweme.LABEL,
      DyColumn.DetailsAuthorDateProductAweme.FINAL_PRICE,
      DyColumn.DetailsAuthorDateProductAweme.PLATFORM,
      DyColumn.DetailsAuthorDateProductAweme.RATIO,
      DyColumn.DetailsAuthorDateProductAweme.PRODUCT_COVER,
      DyColumn.DetailsAuthorDateProductAweme.AUTHOR_ID,
      DyColumn.DetailsAuthorDateProductAweme.AWEME_ID,
      DyColumn.DetailsAuthorDateProductAweme.PRODUCT_ID,
      DyColumn.DetailsAuthorDateProductAweme.DATE
    )
    val scan = qualifiers
      .foldLeft(HBaseUtil.buildScan())((s, qualifier) => s.addColumn(DyColumn.FAMILY, qualifier))
      .setCacheBlocks(false)
    // buildRowScan mutates and returns the same Scan instance.
    buildRowScan(scan, authorId, startDate, endDate)
  }


  /**
   * Reads the per-live-room product details for one author and date range,
   * keyed by (authorId, productId).
   *
   * Rows missing any required column are dropped (the for-comprehension over
   * `ResultUtil.getValue` Options short-circuits).
   */
  def getDetailsAuthorDateProductRoom(authorId: String, startDate: String, endDate: String, sc: SparkContext): DetailsAuthorDateProductRDD = {

    SparkContextUtil.hbaseRDD(sc,
      HBaseInstance.HUGE.getConfiguration,
      DyTable.DETAILS_AUTHOR_DATE_PRODUCT_ROOM.getTableName,
      buildDetailsAuthorDateProductRoomScan(authorId, startDate, endDate))
      .mapPartitions(iterator => {
        val reader = MsgPackReader.create()
        try {
          // BUG FIX: `iterator.flatMap` is lazy, so the original closed the
          // reader in `finally` BEFORE Spark consumed the partition iterator,
          // leaving every decode to run against an already-closed reader.
          // Materialize the partition while the reader is still open, then
          // hand Spark an iterator over the buffered rows.
          iterator.flatMap(result => {
            for (
              volume <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.VOLUME, reader.getLong);
              amount <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.AMOUNT, reader.getDouble);
              productTitle <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.PRODUCT_TITLE, Bytes.toString);
              label <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.LABEL, Bytes.toString);
              finalPrice <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.FINAL_PRICE, reader.getDouble);
              platform <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.PLATFORM, Bytes.toString);
              ratio <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.RATIO, reader.getDouble);
              productCover <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.PRODUCT_COVER, Bytes.toString);
              // renamed from `authorId`: the original binding shadowed the method parameter
              rowAuthorId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.AUTHOR_ID, Bytes.toString);
              roomId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.ROOM_ID, Bytes.toString);
              productId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.PRODUCT_ID, Bytes.toString);
              date <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductRoom.DATE, Bytes.toString)
            ) yield {
              (rowAuthorId, productId) -> (date, roomId, productTitle, platform, label, finalPrice, ratio, productCover, volume, amount)
            }
          }).toVector.iterator
        } finally {
          Closeables.close(reader, true)
        }
      })

  }


  /**
   * Reads the per-video (aweme) product details for one author and date range,
   * keyed by (authorId, productId).
   *
   * Rows missing any required column are dropped (the for-comprehension over
   * `ResultUtil.getValue` Options short-circuits).
   */
  def getDetailsAuthorDateProductAweme(authorId: String, startDate: String, endDate: String, sc: SparkContext): DetailsAuthorDateProductRDD = {
    SparkContextUtil.hbaseRDD(sc,
      HBaseInstance.HUGE.getConfiguration,
      DyTable.DETAILS_AUTHOR_DATE_PRODUCT_AWEME.getTableName,
      buildDetailsAuthorDateProductAwemeScan(authorId, startDate, endDate))
      .mapPartitions(iterator => {
        val reader = MsgPackReader.create()
        try {
          // BUG FIX: `iterator.flatMap` is lazy, so the original closed the
          // reader in `finally` BEFORE Spark consumed the partition iterator,
          // leaving every decode to run against an already-closed reader.
          // Materialize the partition while the reader is still open, then
          // hand Spark an iterator over the buffered rows.
          iterator.flatMap(result => {
            for (
              volume <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.VOLUME, reader.getLong);
              amount <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.AMOUNT, reader.getDouble);
              productTitle <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.PRODUCT_TITLE, Bytes.toString);
              label <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.LABEL, Bytes.toString);
              finalPrice <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.FINAL_PRICE, reader.getDouble);
              platform <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.PLATFORM, Bytes.toString);
              ratio <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.RATIO, reader.getDouble);
              productCover <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.PRODUCT_COVER, Bytes.toString);
              // renamed from `authorId`: the original binding shadowed the method parameter
              rowAuthorId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.AUTHOR_ID, Bytes.toString);
              awemeId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.AWEME_ID, Bytes.toString);
              productId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.PRODUCT_ID, Bytes.toString);
              date <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsAuthorDateProductAweme.DATE, Bytes.toString)
            ) yield {
              (rowAuthorId, productId) -> (date, awemeId, productTitle, platform, label, finalPrice, ratio, productCover, volume, amount)
            }
          }).toVector.iterator
        } finally {
          Closeables.close(reader, true)
        }
      })
  }

  /**
   * Exports the per-live-room product details as CSV (e-commerce analysis, live stream sheet).
   *
   * @param authorId  author to export
   * @param startDate inclusive start date string
   * @param endDate   end date string
   * @param spark     active Spark session used for reading and writing
   */
  def exportDetailsAuthorDateProductRoom(authorId: String, startDate: String, endDate: String, spark: SparkSession): Unit = {

    // Flatten each (key, detail) pair into a single flat Row for export.
    val rows = getDetailsAuthorDateProductRoom(authorId, startDate, endDate, spark.sparkContext).map {
      case ((aid, pid), (date, roomId, title, platform, label, price, ratio, cover, volume, amount)) =>
        Row(aid, pid, date, roomId, title, platform, label, price, ratio, cover, volume, amount)
    }

    // Column order must match the Row construction above.
    val columns = List(
      "达人id" -> StringType,
      "商品id" -> StringType,
      "日期" -> StringType,
      "直播间id" -> StringType,
      "商品名称" -> StringType,
      "商品来源" -> StringType,
      "商品分类" -> StringType,
      "价格" -> DoubleType,
      "佣金比例" -> DoubleType,
      "商品封面" -> StringType,
      "销售量" -> LongType,
      "销售额" -> DoubleType
    )
    val schema = StructType(columns.map { case (name, dataType) => StructField(name, dataType, false) })

    val path = "/Users/chenwy/Desktop/chanmama/导出文件/excel/电商分析/直播"

    CsvExportUtil.`export`(spark, path, rows, schema)
  }

  /**
   * Exports the per-video (aweme) product details as CSV (e-commerce analysis, video sheet).
   *
   * @param authorId  author to export
   * @param startDate inclusive start date string
   * @param endDate   end date string
   * @param spark     active Spark session used for reading and writing
   */
  def exportDetailsAuthorDateProductAweme(authorId: String, startDate: String, endDate: String, spark: SparkSession): Unit = {
    // Flatten each (key, detail) pair into a single flat Row for export.
    val rows = getDetailsAuthorDateProductAweme(authorId, startDate, endDate, spark.sparkContext).map {
      case ((aid, pid), (date, awemeId, title, platform, label, price, ratio, cover, volume, amount)) =>
        Row(aid, pid, date, awemeId, title, platform, label, price, ratio, cover, volume, amount)
    }

    // Column order must match the Row construction above.
    val columns = List(
      "达人id" -> StringType,
      "商品id" -> StringType,
      "日期" -> StringType,
      "视频id" -> StringType,
      "商品名称" -> StringType,
      "商品来源" -> StringType,
      "商品分类" -> StringType,
      "价格" -> DoubleType,
      "佣金比例" -> DoubleType,
      "商品封面" -> StringType,
      "销售量" -> LongType,
      "销售额" -> DoubleType
    )
    val schema = StructType(columns.map { case (name, dataType) => StructField(name, dataType, false) })

    val path = "/Users/chenwy/Desktop/chanmama/导出文件/excel/电商分析/视频"

    CsvExportUtil.`export`(spark, path, rows, schema)
  }


  /**
   * Returns the entry with the greatest date (first tuple element, compared
   * lexicographically). On ties the earliest-positioned entry wins — `maxBy`
   * keeps the first of equal maxima, matching the previous `reduce` with `>=`.
   *
   * @throws UnsupportedOperationException if `array` is empty (as before)
   */
  def get(array: Array[(String, String, String, String, String)]): (String, String, String, String, String) = {
    // Replaces a pointless `date1 match { case _ if ... }` (a disguised
    // if/else inside a manual reduce) with the idiomatic maxBy.
    array.maxBy(_._1)
  }

  /**
   * Collapses per-date detail rows into one aggregate per (authorId, productId):
   * ids are concatenated, the title/platform/label/cover of the latest date win,
   * price takes the minimum, ratio the maximum, volume and amount are summed.
   */
  def transform(rdd: DetailsAuthorDateProductRDD): RDD[((String, String), (String, Array[String], String, String, String, Double, Double, String, Long, Double))] = {
    // Lift the single room/aweme id into an Array so reduceByKey can accumulate ids.
    val seeded = rdd.mapValues {
      case (date, id, title, platform, label, price, ratio, cover, volume, amount) =>
        (date, Array(id), title, platform, label, price, ratio, cover, volume, amount)
    }
    seeded.reduceByKey { (left, right) =>
      val (dateA, idsA, titleA, platformA, labelA, priceA, ratioA, coverA, volumeA, amountA) = left
      val (dateB, idsB, titleB, platformB, labelB, priceB, ratioB, coverB, volumeB, amountB) = right
      // `get` picks the descriptive fields belonging to the latest date.
      val (date, title, platform, label, cover) =
        get(Array((dateA, titleA, platformA, labelA, coverA), (dateB, titleB, platformB, labelB, coverB)))
      (date, idsA.union(idsB), title, platform, label,
        Math.min(priceA, priceB), Math.max(ratioA, ratioB), cover,
        volumeA + volumeB, amountA + amountB)
    }
  }

  /**
   * Unwraps an aggregate tuple, falling back to neutral defaults when absent.
   *
   * The defaults are chosen as identity elements for the merges callers apply:
   * `Double.MaxValue` for `Math.min` on price, `Double.MinValue` for `Math.max`
   * on ratio, zeros for the sums, empty strings/array elsewhere.
   */
  def getOrElse(option: Option[(String, Array[String], String, String, String, Double, Double, String, Long, Double)]): (String, Array[String], String, String, String, Double, Double, String, Long, Double) = {
    // Explicit tuple literal: the original passed 10 arguments to getOrElse's
    // single parameter, relying on deprecated Scala 2 argument auto-tupling.
    option.getOrElse(("", Array.empty[String], "", "", "", Double.MaxValue, Double.MinValue, "", 0L, 0.0D))
  }

  /**
   * Exports one combined row per (authorId, productId), merging the aggregated
   * live-room data with the aggregated video (aweme) data via a full outer join,
   * and writes the result as CSV.
   */
  def exportDetailsAuthorProduct(authorId: String, startDate: String, endDate: String, spark: SparkSession): Unit = {


    // Per-product aggregates from the live-room table.
    val rdd1 = transform(getDetailsAuthorDateProductRoom(authorId, startDate, endDate, spark.sparkContext))

    // Per-product aggregates from the video (aweme) table.
    val rdd2 = transform(getDetailsAuthorDateProductAweme(authorId, startDate, endDate, spark.sparkContext))

    val rdd = rdd1.fullOuterJoin(rdd2).map {
      case ((authorId, productId), (option1, option2)) => {
        // Missing sides fall back to getOrElse's neutral defaults, chosen so the
        // merges below are unaffected: Double.MaxValue is the identity for
        // Math.min (price), Double.MinValue for Math.max (ratio), 0 for the sums,
        // and "" for the date so `get` prefers the side that actually has data.
        val (date1, roomIds1, title1, platform1, label1, finalPrice1, ratio1, cover1, volume1, amount1) = getOrElse(option1)
        val (date2, roomIds2, title2, platform2, label2, finalPrice2, ratio2, cover2, volume2, amount2) = getOrElse(option2)
        // Descriptive fields (title/platform/label/cover) come from whichever
        // side has the later date.
        val (date, title, platform, label, cover) = get(Array[(String, String, String, String, String)]((date1, title1, platform1, label1, cover1), (date2, title2, platform2, label2, cover2)))
        val finalPrice = Math.min(finalPrice1, finalPrice2)
        val ratio = Math.max(ratio1, ratio2)
        val volume = volume1 + volume2
        val amount = amount1 + amount2
        // roomIds1 holds live-room ids, roomIds2 holds video ids (see transform's
        // inputs); both are flattened to comma-separated strings for the CSV.
        Row(authorId, productId, title, platform, label, cover, finalPrice, ratio, volume, amount, roomIds1.mkString(","), roomIds2.mkString(","))
      }
    }

    // Column order must match the Row construction above.
    val schema = StructType(
      List(
        StructField("达人id", StringType, false),
        StructField("商品id", StringType, false),
        StructField("商品名称", StringType, false),
        StructField("商品来源", StringType, false),
        StructField("商品分类", StringType, false),
        StructField("商品封面", StringType, false),
        StructField("价格", DoubleType, false),
        StructField("佣金比例", DoubleType, false),
        StructField("销售量", LongType, false),
        StructField("销售额", DoubleType, false),
        StructField("直播间id", StringType, false),
        StructField("视频id", StringType, false)
      )
    )

    // NOTE(review): developer-machine absolute path — parameterize before deploying.
    val path = "/Users/chenwy/Desktop/chanmama/导出文件/excel/电商分析/商品"

    CsvExportUtil.`export`(spark, path, rdd, schema)

  }

  /**
   * Entry point. Usage: `<authorId> <startDate> <endDate>`.
   * Exits silently (as before) when fewer than three arguments are supplied.
   */
  def main(args: Array[String]): Unit = {
    // Guard clause instead of an early `return`.
    if (args.length >= 3) {
      val Array(authorId, startDate, endDate) = args.take(3)
      val spark = SparkSession.builder().appName("导出数据").master("local").getOrCreate()
      try {
        exportDetailsAuthorDateProductRoom(authorId, startDate, endDate, spark)
        exportDetailsAuthorDateProductAweme(authorId, startDate, endDate, spark)
        exportDetailsAuthorProduct(authorId, startDate, endDate, spark)
      } finally {
        spark.stop()
      }
    }
  }
}
