package com.twq.session

import com.twq.spark.session.{TrackerLog, TrackerSession}
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.parquet.avro.{AvroParquetOutputFormat, AvroWriteSupport}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

/**
 * 输出组件
 */
/**
 * Output component: persists the session-cutting results.
 *
 * Concrete subclasses decide the on-disk format; the base implementation
 * only clears any pre-existing output so re-runs do not fail.
 */
trait OutputComponent {

  /**
   * Saves the result datasets under `baseOutputPath`.
   *
   * Subclasses override this to write the data and should call
   * `super.writeOutputData(...)` first so stale output is removed.
   *
   * @param sc                      active SparkContext (used for the Hadoop configuration)
   * @param baseOutputPath          root directory that receives all output
   * @param parsedLogRDD            parsed tracker log records
   * @param cookieLabeledSessionRDD sessions already labeled by cookie
   */
  def writeOutputData(sc: SparkContext, baseOutputPath: String,
                      parsedLogRDD: RDD[TrackerLog], cookieLabeledSessionRDD: RDD[TrackerSession]): Unit = {
    // Remove any output left over from a previous run before writing.
    deleteIfExists(sc, baseOutputPath)
  }

  /**
   * Recursively deletes `outputPath` if it already exists on the target FileSystem.
   *
   * @param sc         active SparkContext providing the Hadoop configuration
   * @param outputPath directory to delete (renamed from the misleading
   *                   `trackerLogOutputPath`: callers pass the *base* path)
   */
  private def deleteIfExists(sc: SparkContext, outputPath: String): Unit = {
    val path = new Path(outputPath)
    val fileSystem = path.getFileSystem(sc.hadoopConfiguration)
    if (fileSystem.exists(path)) {
      // recursive = true: the directory contains part files from the previous run
      fileSystem.delete(path, true)
    }
  }
}

object OutputComponent {

  /**
   * Factory for the concrete output component matching `fileType`.
   *
   * @param fileType "parquet" selects [[ParquetFileOutput]]; anything else
   *                 (including null) falls back to [[TextFileOutput]]
   * @return the output component for the requested file type
   */
  def fromOutPutFileType(fileType: String): OutputComponent = {
    // Pattern matching is null-safe, unlike the previous `fileType.equals(...)`,
    // which threw a NullPointerException when fileType was null.
    fileType match {
      case "parquet" => new ParquetFileOutput
      case _         => new TextFileOutput
    }
  }
}

/**
 * 写parquet文件
 */
/**
 * Writes the result datasets as Avro-backed Parquet files.
 */
class ParquetFileOutput extends OutputComponent {

  /**
   * Persists both result datasets under `baseOutputPath` in Parquet format.
   *
   * @param sc                      active SparkContext (used for the Hadoop configuration)
   * @param baseOutputPath          root directory that receives all output
   * @param parsedLogRDD            parsed tracker log records
   * @param cookieLabeledSessionRDD sessions already labeled by cookie
   */
  override def writeOutputData(sc: SparkContext, baseOutputPath: String,
                               parsedLogRDD: RDD[TrackerLog],
                               cookieLabeledSessionRDD: RDD[TrackerSession]): Unit = {
    // Let the base trait clear any output from a previous run first.
    super.writeOutputData(sc, baseOutputPath, parsedLogRDD, cookieLabeledSessionRDD)

    // Save the TrackerLog records. The Avro schema must be registered on the
    // Hadoop configuration before the write job is submitted.
    AvroWriteSupport.setSchema(sc.hadoopConfiguration, TrackerLog.SCHEMA$)
    parsedLogRDD
      .map(log => (null, log))
      .saveAsNewAPIHadoopFile(s"$baseOutputPath/trackerLog",
        classOf[Void], classOf[TrackerLog], classOf[AvroParquetOutputFormat[TrackerLog]])

    // Save the TrackerSession records the same way, switching the registered
    // schema first.
    AvroWriteSupport.setSchema(sc.hadoopConfiguration, TrackerSession.SCHEMA$)
    cookieLabeledSessionRDD
      .map(session => (null, session))
      .saveAsNewAPIHadoopFile(s"$baseOutputPath/trackerSession",
        classOf[Void], classOf[TrackerSession], classOf[AvroParquetOutputFormat[TrackerSession]])
  }
}

/**
 * Writes the result datasets as plain text files.
 */
class TextFileOutput extends OutputComponent {

  /**
   * Persists both result datasets under `baseOutputPath` as text files.
   *
   * @param sc                      active SparkContext (used for the Hadoop configuration)
   * @param baseOutputPath          root directory that receives all output
   * @param parsedLogRDD            parsed tracker log records
   * @param cookieLabeledSessionRDD sessions already labeled by cookie
   */
  override def writeOutputData(sc: SparkContext, baseOutputPath: String,
                               parsedLogRDD: RDD[TrackerLog],
                               cookieLabeledSessionRDD: RDD[TrackerSession]): Unit = {
    // Let the base trait clear any output from a previous run first.
    super.writeOutputData(sc, baseOutputPath, parsedLogRDD, cookieLabeledSessionRDD)

    // Save the TrackerLog records.
    parsedLogRDD.saveAsTextFile(s"$baseOutputPath/trackerLog")

    // Save the TrackerSession records.
    cookieLabeledSessionRDD.saveAsTextFile(s"$baseOutputPath/trackerSession")
  }
}


