package org.spark.api.example

import com.google.common.io.Closeables
import org.apache.commons.codec.digest.DigestUtils
import org.apache.hadoop.hbase.client.Scan
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types.{DoubleType, IntegerType, LongType, StringType, StructField, StructType}
import org.spark.api.config.HBaseInstance
import org.spark.api.constant.{DyColumn, DyTable}
import org.spark.api.util.{HBaseUtil, MsgPackReader, MsgPackWriter}
import org.spark.api.utils.{CsvExportUtil, ResultUtil, SparkContextUtil}

/**
 *
 */
object DetailsProductAuthor {


  /**
   * Builds an HBase [[Scan]] over the per-date author/room detail columns for one product.
   *
   * Row keys are prefixed with sha1Hex(productId) packed through MsgPack, so the scan
   * is restricted to that prefix. Block caching is disabled because this is a one-off
   * analytical scan.
   *
   * @param productId product identifier used to derive the row-key prefix
   * @return a configured Scan ready to be handed to the HBase RDD helper
   */
  def buildProductDateAuthorRoomScan(productId: String): Scan = {
    val packer = MsgPackWriter.create()
    try {
      val scan = HBaseUtil.buildScan()
      scan.setCacheBlocks(false)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.AUTHOR_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.PRODUCT_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.ROOM_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.DATE)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.VOLUME)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.AMOUNT)
      scan.setRowPrefixFilter(packer.from(DigestUtils.sha1Hex(productId)))
    } finally {
      packer.close()
    }
  }

  /**
   * Builds an HBase [[Scan]] over the per-date author/aweme (video) detail columns
   * for one product.
   *
   * Row keys are prefixed with sha1Hex(productId) packed through MsgPack; block
   * caching is disabled for this one-off analytical scan.
   *
   * @param productId product identifier used to derive the row-key prefix
   * @return a configured Scan ready to be handed to the HBase RDD helper
   */
  def buildProductDateAuthorAwemeScan(productId: String): Scan = {
    val packer = MsgPackWriter.create()
    try {
      val scan = HBaseUtil.buildScan()
      scan.setCacheBlocks(false)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.AUTHOR_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.PRODUCT_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.AWEME_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.DATE)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.VOLUME)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.AMOUNT)
      scan.setRowPrefixFilter(packer.from(DigestUtils.sha1Hex(productId)))
    } finally {
      packer.close()
    }
  }

  /**
   * Reads per-date author/room sales detail rows for one product.
   *
   * Rows missing any of the six expected columns are dropped (the for-comprehension
   * over Options short-circuits on the first missing value).
   *
   * @param productId product whose rows are scanned (row-key prefix)
   * @param sc        active SparkContext
   * @return pairs of (productId, authorId, roomId) -> (date, volume, amount)
   */
  def getProductDateAuthorRoomRDD(productId: String, sc: SparkContext): RDD[((String, String, String), (String, Long, Double))] = {

    SparkContextUtil.hbaseRDD(sc, HBaseInstance.HUGE.getConfiguration, DyTable.DETAILS_AUTHOR_DATE_PRODUCT_ROOM.getTableName, buildProductDateAuthorRoomScan(productId)).mapPartitions(iterator => {

      val reader = MsgPackReader.create()
      try {
        // BUG FIX: Iterator.flatMap is lazy, so the previous code closed the reader
        // in `finally` before Spark ever consumed the iterator and invoked
        // reader.getLong / reader.getDouble. Materialize the decoded rows while the
        // reader is still open, then hand back a fresh iterator over them.
        iterator.flatMap(result => {
          for (
            authorId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.AUTHOR_ID, Bytes.toString);
            productId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.PRODUCT_ID, Bytes.toString);
            roomId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.ROOM_ID, Bytes.toString);
            date <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.DATE, Bytes.toString);
            volume <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.VOLUME, reader.getLong);
            amount <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorRoom.AMOUNT, reader.getDouble)
          ) yield {
            (productId, authorId, roomId) -> (date, volume, amount)
          }
        }).toVector.iterator
      } finally {
        Closeables.close(reader, true)
      }

    })

  }

  /**
   * Reads per-date author/aweme (video) sales detail rows for one product.
   *
   * Rows missing any of the six expected columns are dropped (the for-comprehension
   * over Options short-circuits on the first missing value).
   *
   * @param productId product whose rows are scanned (row-key prefix)
   * @param sc        active SparkContext
   * @return pairs of (productId, authorId, awemeId) -> (date, volume, amount)
   */
  def getProductDateAuthorAwemeRDD(productId: String, sc: SparkContext): RDD[((String, String, String), (String, Long, Double))] = {

    SparkContextUtil.hbaseRDD(sc, HBaseInstance.HUGE.getConfiguration, DyTable.DETAILS_AUTHOR_DATE_PRODUCT_AWEME.getTableName, buildProductDateAuthorAwemeScan(productId)).mapPartitions(iterator => {
      val reader = MsgPackReader.create()
      try {
        // BUG FIX: Iterator.flatMap is lazy, so the previous code closed the reader
        // in `finally` before Spark consumed the iterator and called
        // reader.getLong / reader.getDouble. Materialize while the reader is open.
        iterator.flatMap(result => {
          for (
            authorId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.AUTHOR_ID, Bytes.toString);
            productId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.PRODUCT_ID, Bytes.toString);
            awemeId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.AWEME_ID, Bytes.toString);
            date <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.DATE, Bytes.toString);
            volume <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.VOLUME, reader.getLong);
            amount <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductDateAuthorAweme.AMOUNT, reader.getDouble)
          ) yield {
            (productId, authorId, awemeId) -> (date, volume, amount)
          }
        }).toVector.iterator
      } finally {
        Closeables.close(reader, true)
      }
    })
  }


  /**
   * Builds an HBase [[Scan]] over the aggregated (date-less) author/room detail
   * columns for one product.
   *
   * Row keys are prefixed with sha1Hex(productId) packed through MsgPack; block
   * caching is disabled for this one-off analytical scan.
   *
   * @param productId product identifier used to derive the row-key prefix
   * @return a configured Scan ready to be handed to the HBase RDD helper
   */
  def buildDetailsProductAuthorRoomScan(productId: String): Scan = {
    val packer = MsgPackWriter.create()
    try {
      val scan = HBaseUtil.buildScan()
      scan.setCacheBlocks(false)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.AUTHOR_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.PRODUCT_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.ROOM_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.VOLUME)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.AMOUNT)
      scan.setRowPrefixFilter(packer.from(DigestUtils.sha1Hex(productId)))
    } finally {
      packer.close()
    }
  }

  /**
   * Builds an HBase [[Scan]] over the aggregated (date-less) author/aweme (video)
   * detail columns for one product.
   *
   * Row keys are prefixed with sha1Hex(productId) packed through MsgPack; block
   * caching is disabled for this one-off analytical scan.
   *
   * @param productId product identifier used to derive the row-key prefix
   * @return a configured Scan ready to be handed to the HBase RDD helper
   */
  def buildDetailsProductAuthorAwemeScan(productId: String): Scan = {
    val packer = MsgPackWriter.create()
    try {
      val scan = HBaseUtil.buildScan()
      scan.setCacheBlocks(false)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.AUTHOR_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.PRODUCT_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.AWEME_ID)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.VOLUME)
      scan.addColumn(DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.AMOUNT)
      scan.setRowPrefixFilter(packer.from(DigestUtils.sha1Hex(productId)))
    } finally {
      packer.close()
    }
  }


  /**
   * Reads aggregated author/room sales rows for one product.
   *
   * Rows missing AUTHOR_ID, PRODUCT_ID, or ROOM_ID are dropped; missing VOLUME /
   * AMOUNT default to 0.
   *
   * @param productId product whose rows are scanned (row-key prefix)
   * @param sc        active SparkContext
   * @return pairs of productId -> (authorId, roomId, volume, amount)
   */
  def getProductAuthorRoomRDD(productId: String, sc: SparkContext): RDD[(String, (String, String, Long, Double))] = {

    // BUG FIX: this method scans DETAILS_PRODUCT_AUTHOR_ROOM and reads
    // DyColumn.DetailsProductAuthorRoom qualifiers, but previously built the scan
    // with buildProductDateAuthorRoomScan (the Date-keyed table's qualifiers),
    // so the scan requested the wrong columns. Use the matching builder.
    SparkContextUtil.hbaseRDD(sc, HBaseInstance.HUGE.getConfiguration, DyTable.DETAILS_PRODUCT_AUTHOR_ROOM.getTableName, buildDetailsProductAuthorRoomScan(productId)).mapPartitions(iterator => {

      val reader = MsgPackReader.create()
      try {
        // BUG FIX: Iterator.flatMap is lazy; materialize the decoded rows while the
        // reader is still open, otherwise `finally` closes it before consumption.
        iterator.flatMap(result => {
          for (
            authorId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.AUTHOR_ID, Bytes.toString);
            productId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.PRODUCT_ID, Bytes.toString);
            roomId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.ROOM_ID, Bytes.toString)
          ) yield {
            val volume = ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.VOLUME, reader.getLong).getOrElse(0L)
            val amount = ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorRoom.AMOUNT, reader.getDouble).getOrElse(0.0D)
            productId -> (authorId, roomId, volume, amount)
          }
        }).toVector.iterator
      } finally {
        Closeables.close(reader, true)
      }

    })

  }

  /**
   * Reads aggregated author/aweme (video) sales rows for one product.
   *
   * Rows missing AUTHOR_ID, PRODUCT_ID, or AWEME_ID are dropped; missing VOLUME /
   * AMOUNT default to 0.
   *
   * @param productId product whose rows are scanned (row-key prefix)
   * @param sc        active SparkContext
   * @return pairs of productId -> (authorId, awemeId, volume, amount)
   */
  def getProductAuthorAwemeRDD(productId: String, sc: SparkContext): RDD[(String, (String, String, Long, Double))] = {

    // BUG FIX: this method scans DETAILS_PRODUCT_AUTHOR_AWEME and reads
    // DyColumn.DetailsProductAuthorAweme qualifiers, but previously built the scan
    // with buildProductDateAuthorAwemeScan (the Date-keyed table's qualifiers).
    // Use the matching builder.
    SparkContextUtil.hbaseRDD(sc, HBaseInstance.HUGE.getConfiguration, DyTable.DETAILS_PRODUCT_AUTHOR_AWEME.getTableName, buildDetailsProductAuthorAwemeScan(productId)).mapPartitions(iterator => {
      val reader = MsgPackReader.create()
      try {
        // BUG FIX: Iterator.flatMap is lazy; materialize while the reader is open,
        // otherwise `finally` closes it before consumption.
        iterator.flatMap(result => {
          for (
            authorId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.AUTHOR_ID, Bytes.toString);
            productId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.PRODUCT_ID, Bytes.toString);
            awemeId <- ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.AWEME_ID, Bytes.toString)
          ) yield {
            val volume = ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.VOLUME, reader.getLong).getOrElse(0L)
            val amount = ResultUtil.getValue(result, DyColumn.FAMILY, DyColumn.DetailsProductAuthorAweme.AMOUNT, reader.getDouble).getOrElse(0.0D)
            productId -> (authorId, awemeId, volume, amount)
          }
        }).toVector.iterator
      } finally {
        Closeables.close(reader, true)
      }
    })
  }


  /**
   * Exports per-room sales for one product as a single CSV file.
   *
   * @param productId product to export
   * @param spark     active SparkSession
   * @param path      output directory; defaults to the previous hard-coded
   *                  developer-machine path for backward compatibility
   */
  def exportProductAuthorRoom(productId: String, spark: SparkSession,
                              path: String = "/Users/chenwy/Desktop/chanmama/导出文件/excel/达人分析/直播"): Unit = {
    val rdd = getProductAuthorRoomRDD(productId, spark.sparkContext).map {
      case (productId, (authorId, roomId, volume, amount)) => {
        Row(productId, authorId, roomId, volume, amount)
      }
    }.repartition(1) // single partition so the export produces one CSV file

    val schema = StructType(
      List(
        StructField("商品id", StringType, false),
        StructField("达人id", StringType, false),
        StructField("直播间id", StringType, false),
        StructField("销售量", LongType, false),
        StructField("销售额", DoubleType, false)
      )
    )

    CsvExportUtil.`export`(spark, path, rdd, schema)
  }

  /**
   * Exports per-video (aweme) sales for one product as a single CSV file.
   *
   * @param productId product to export
   * @param spark     active SparkSession
   * @param path      output directory; defaults to the previous hard-coded
   *                  developer-machine path for backward compatibility
   */
  def exportProductAuthorAweme(productId: String, spark: SparkSession,
                               path: String = "/Users/chenwy/Desktop/chanmama/导出文件/excel/达人分析/视频"): Unit = {

    val rdd = getProductAuthorAwemeRDD(productId, spark.sparkContext).map {
      case (productId, (authorId, awemeId, volume, amount)) => {
        Row(productId, authorId, awemeId, volume, amount)
      }
    }.repartition(1) // single partition so the export produces one CSV file

    val schema = StructType(
      List(
        StructField("商品id", StringType, false),
        StructField("达人id", StringType, false),
        StructField("视频id", StringType, false),
        StructField("销售量", LongType, false),
        StructField("销售额", DoubleType, false)
      )
    )

    CsvExportUtil.`export`(spark, path, rdd, schema)
  }

  /**
   * Exports per-author sales for one product as a CSV file, combining room
   * (live-stream) and aweme (video) channels.
   *
   * For each (productId, authorId) pair it aggregates the distinct room ids and
   * aweme ids plus summed volume/amount per channel, full-outer-joins the two
   * channels (an author may appear in only one), and emits one row per author.
   *
   * @param productId product to export
   * @param spark     active SparkSession
   * @param path      output directory; defaults to the previous hard-coded
   *                  developer-machine path for backward compatibility
   */
  def exportProductAuthor(productId: String, spark: SparkSession,
                          path: String = "/Users/chenwy/Desktop/chanmama/导出文件/excel/达人分析/达人"): Unit = {

    // Room channel: distinct room ids + summed volume/amount per (product, author).
    val rdd1 = getProductAuthorRoomRDD(productId, spark.sparkContext).map {
      case (productId, (authorId, roomId, volume, amount)) => {
        (productId, authorId) -> (Set(roomId), volume, amount)
      }
    }.reduceByKey {
      case ((set1, volume1, amount1), (set2, volume2, amount2)) => {
        (set1.union(set2), volume1 + volume2, amount1 + amount2)
      }
    }

    // Aweme (video) channel: distinct aweme ids + summed volume/amount.
    val rdd2 = getProductAuthorAwemeRDD(productId, spark.sparkContext).map {
      case (productId, (authorId, awemeId, volume, amount)) => {
        (productId, authorId) -> (Set(awemeId), volume, amount)
      }
    }.reduceByKey {
      case ((set1, volume1, amount1), (set2, volume2, amount2)) => {
        (set1.union(set2), volume1 + volume2, amount1 + amount2)
      }
    }

    // Full outer join: keep authors that sold through only one of the channels,
    // treating the absent channel as empty (no ids, zero volume/amount).
    val rdd = rdd1.fullOuterJoin(rdd2).map {
      case ((productId, authorId), (option1, option2)) => {

        val (set1, volume1, amount1) = option1.getOrElse((Set[String](), 0L, 0.0D))

        val (set2, volume2, amount2) = option2.getOrElse((Set[String](), 0L, 0.0D))

        Row(productId, authorId, set1.size, set2.size, set1.mkString(","), set2.mkString(","), volume1 + volume2, amount1 + amount2)
      }
    }

    val schema = StructType(
      List(
        StructField("商品id", StringType, false),
        StructField("达人id", StringType, false),
        StructField("直播数", IntegerType, false),
        StructField("视频数", IntegerType, false),
        StructField("关联直播id", StringType, false),
        StructField("关联视频id", StringType, false),
        StructField("销售量", LongType, false),
        StructField("销售额", DoubleType, false)
      )
    )

    CsvExportUtil.`export`(spark, path, rdd, schema)

  }


  /**
   * Entry point: exports per-author sales for the product id given as the first
   * CLI argument. Does nothing when no argument is supplied.
   */
  def main(args: Array[String]): Unit = {
    args.headOption.foreach { productId =>
      val spark = SparkSession.builder().appName("导出数据").master("local").getOrCreate()
      try {
        // Other exports are available here if needed:
        // exportProductAuthorRoom(productId, spark)
        // exportProductAuthorAweme(productId, spark)
        exportProductAuthor(productId, spark)
      } finally {
        spark.stop()
      }
    }
  }

}
