package com.sugon.ww

import java.io._
import java.util.Date

import cn.hutool.core.date.{DatePattern, DateUtil}
import cn.hutool.core.io.FileUtil
import cn.hutool.core.util.ZipUtil
import cn.hutool.extra.ftp.Ftp
import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializerFeature
import com.sugon.entity._
import org.apache.spark.TaskContext
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.collection.JavaConverters._
import scala.collection.mutable._

object PushServiceApp {

  private val indexFileName = "GA_ZIP_INDEX.json"
  private val DATA_SENDER_LOGO = "150200220000"
  private val DATA_RECEIVE_LOGO = "150200110000"
  // NOTE(review): this var used to be reassigned on the driver only. Scala
  // objects are re-initialized independently in each executor JVM, so executors
  // never saw the driver-side assignment and fell back to this default when
  // naming zips. It is kept (and still assigned in main) for backward
  // compatibility, but the table name is now passed explicitly through
  // handleZip/getZipName so executors use the right value.
  private var CUSTOM_LOGO = "bzysk_v_dsj_cjxx"
  private val SYMBOL = "-"
  private val CODE_NAME = "内蒙古包头市公安局科技信息化支队"
  private val filePath = "/tmp/pushService/"
  private val zipTargetPath = "/tmp/zip"

  // SECURITY(review): hard-coded database credentials; move to a config file
  // or secrets store before shipping.
  private val mysqlUrl = "jdbc:mysql://182.186.180.59:3306"
  private val mysqlUserName = "root"
  private val mysqlPassword = "Sugon_456"
  private val mysqlDBName = "xuanyuan"

  // chunking parameters: rows per .bcp data file, data files per zip package
  private val recordsPerFile = 5000
  private val filesPerZip = 50

  // NOTE(review): evaluated once per JVM, so driver and each executor carry a
  // slightly different timestamp; file/zip names are therefore only
  // pseudo-unique across the cluster — confirm this is acceptable downstream.
  private val timestamp = System.currentTimeMillis()

  /**
    * Pipeline:
    *  1. load the Hive table and dump it as TSV
    *  2. write data files in chunks of `recordsPerFile` rows
    *  3. build the JSON index file for each chunk group
    *  4. zip every `filesPerZip` data files (plus index)
    *  5. upload each zip via FTP
    *
    * Expected args: dbName, tableName, ftpUrl, ftpUserName, ftpPassword, partitionNum
    */
  def main(args: Array[String]): Unit = {
    if (args.length < 6) {
      throw new IllegalArgumentException("输入参数错误")
    }
    // input parameters
    val dbName = args(0)
    val tableName = args(1)
    val ftpUrl = args(2)
    val port = 21
    val ftpUserName = args(3)
    val ftpPassword = args(4)
    val partitionNum = args(5)

    // driver-side assignment kept for backward compatibility; executors get
    // the table name as an explicit argument (see CUSTOM_LOGO note above)
    CUSTOM_LOGO = tableName

    val spark = SparkSession.builder().appName("PushServiceApp")
      .enableHiveSupport()
      .getOrCreate()

    // Dump the Hive table as tab-separated text so each partition can be
    // streamed line by line without re-parsing the schema.
    val hiveDF = spark.table(s"$dbName.$tableName")
    hiveDF.write.mode(SaveMode.Overwrite).option("sep", "\t").csv("temp")

    val csvRdd = spark.sparkContext.textFile("temp")
    // column names, used to resolve field descriptions from MySQL metadata
    val fieldNames = hiveDF.columns

    // Computed once on the driver, then shipped to executors in the closure.
    val descriptionList = getCommonDataDesc(tableName, fieldNames, spark)

    val csvRddRepartition = csvRdd.repartition(partitionNum.toInt)
    // total partition count, used to interleave zip indices across partitions
    val rddNum = csvRddRepartition.getNumPartitions

    csvRddRepartition.foreachPartition(iterator => {
      // constant within a partition — hoisted out of the per-row loop
      val partitionId = TaskContext.getPartitionId()
      // rows seen so far in this partition
      var rowNum = 0
      val tmpRowList = new ListBuffer[String]
      val dataFileList = new ListBuffer[DataFile]
      // data-file sequence number within this partition
      var fileIndex = 0
      // zip sequence number (globally unique across partitions)
      var zipIndex = 0
      for (row <- iterator) {
        rowNum += 1
        tmpRowList.append(row)
        // flush a data file every recordsPerFile rows
        if (rowNum % recordsPerFile == 0) {
          fileIndex += 1
          handleFile(getFileName(fileIndex, tableName), tmpRowList, dataFileList)
        }

        // package and upload a zip every filesPerZip data files
        if (rowNum % (recordsPerFile * filesPerZip) == 0) {
          // interleave: round k of partition p gets index p + 1 + rddNum * (k - 1)
          zipIndex = partitionId + 1 + rddNum * (rowNum / recordsPerFile / filesPerZip - 1)
          // build the JSON index file for this zip
          val packageHead = getPackageHead(filesPerZip * recordsPerFile, filesPerZip, zipIndex)
          generateIndexFile(packageHead, descriptionList, dataFileList)
          // package and upload
          handleZip(ftpUrl, port, ftpUserName, ftpPassword, zipIndex, tableName)
          dataFileList.clear()
        }
      }
      // trailing rows that did not fill a whole data file
      if (tmpRowList.nonEmpty) {
        handleFile(getFileName(fileIndex + 1, tableName), tmpRowList, dataFileList)
      }

      // trailing data files that did not fill a whole zip
      if (dataFileList.nonEmpty) {
        val recordNum = dataFileList.map(_.getDataFileRecordNum).sum

        // BUGFIX: if this partition never produced a full zip, zipIndex is
        // still 0 and the old "zipIndex + rddNum" gave EVERY partition the
        // same index (identical messageSequence). Use the partition's own
        // first slot in that case.
        val finalZipIndex = if (zipIndex == 0) partitionId + 1 else zipIndex + rddNum
        val packageHead = getPackageHead(recordNum, dataFileList.length, finalZipIndex)
        generateIndexFile(packageHead, descriptionList, dataFileList)
        // package and upload
        handleZip(ftpUrl, port, ftpUserName, ftpPassword, finalZipIndex, tableName)
      }
    })
  }

  /**
    * Writes the buffered rows to a data file under this partition's working
    * directory, records the file's metadata for the index file, and clears
    * the row buffer.
    *
    * @param fileName     name of the data file to create
    * @param tmpRowList   buffered rows; emptied by this call
    * @param dataFileList accumulator of per-file index entries; appended to
    */
  def handleFile(fileName: String, tmpRowList: ListBuffer[String], dataFileList: ListBuffer[DataFile]): Unit = {
    val path = filePath + TaskContext.getPartitionId()
    // write rows to disk (tmpRowList is cleared below)
    writeDataFile(tmpRowList, path, fileName)
    // record index-file metadata for this data file
    val dataFile: DataFile = new DataFile
    dataFile.setDataFilePath(fileName)
    dataFile.setDataFileAttachedNum(0)
    dataFile.setDataFileRecordNum(tmpRowList.length)
    dataFileList.append(dataFile)
    tmpRowList.clear()
  }

  /**
    * Zips this partition's working directory, uploads the zip via FTP, and
    * cleans up both the source directory and the local zip.
    *
    * @param url       FTP host
    * @param port      FTP port
    * @param user      FTP user
    * @param password  FTP password
    * @param zipIndex  sequence number embedded in the zip name
    * @param tableName table logo for the zip name (defaults to CUSTOM_LOGO
    *                  for backward compatibility; pass explicitly on executors)
    */
  def handleZip(url: String, port: Int, user: String, password: String, zipIndex: Int,
                tableName: String = CUSTOM_LOGO): Unit = {

    val zipSrcPath = filePath + TaskContext.getPartitionId()
    val zipName = zipTargetPath + File.separator + getZipName(zipIndex, tableName)

    // BUGFIX: the zip is written under zipTargetPath, so that is the directory
    // that must exist (the original mkdir'd filePath instead).
    val dir = new File(zipTargetPath)
    if (!dir.exists) {
      dir.mkdirs
    }
    // package the partition's data + index files
    ZipUtil.zip(zipSrcPath, zipName)
    // remove the now-packaged source directory
    FileUtil.del(zipSrcPath)

    try {
      uploadZip(url, port, user, password, zipName)
    } finally {
      // always reclaim the local zip, even if the upload failed
      FileUtil.del(zipName)
    }
  }


  /**
    * Serializes the index file (package head + field descriptions + data-file
    * entries) as pretty-printed JSON into this partition's working directory.
    *
    * NOTE(review): the unused SparkSession parameter was dropped — it was
    * being captured inside foreachPartition, and a SparkSession must not be
    * shipped to executors.
    */
  private def generateIndexFile(packageHead: PackageHead, descriptionList: ListBuffer[DataDescription],
                                dataFiles: ListBuffer[DataFile]): Unit = {

    val indexFile: IndexFile = new IndexFile

    // assemble the index file
    indexFile.setPackageHead(packageHead)
    indexFile.setDataDescriptions(descriptionList.asJava)
    indexFile.setDataFiles(dataFiles.asJava)

    val jsonString = JSON.toJSONString(indexFile, SerializerFeature.PrettyFormat,
      SerializerFeature.WriteMapNullValue, SerializerFeature.WriteDateUseDateFormat)

    val path = filePath + TaskContext.getPartitionId()
    // BUGFIX: explicit UTF-8 instead of the platform default charset
    FileUtil.writeBytes(jsonString.getBytes("UTF-8"), path + File.separator + indexFileName)
  }

  /**
    * Builds the field-description section of the index file by joining the
    * Hive column names against the MySQL metadata tables.
    *
    * @param tableName  Hive table name (matched against t_ds_table_info.tb_name)
    * @param fieldNames Hive column names, in output order
    * @param spark      driver-side session used for the JDBC reads
    * @return single-element list holding the assembled DataDescription
    * @throws NoSuchElementException if the table is absent from the metadata
    *                                tables (first() on an empty DataFrame)
    */
  def getCommonDataDesc(tableName: String, fieldNames: Array[String], spark: SparkSession): ListBuffer[DataDescription] = {
    import spark.implicits._
    // JDBC reader for the metadata database
    val mysqlDataFrameReader = spark.read.format("jdbc").option("url", mysqlUrl)
      .option("user", mysqlUserName)
      .option("password", mysqlPassword)
      .option("driver", "com.mysql.jdbc.Driver")

    val tableInfoDF = mysqlDataFrameReader.option("dbtable", s"$mysqlDBName.t_ds_table_info").load()
    val tableMoreInfoDF = mysqlDataFrameReader.option("dbtable", s"$mysqlDBName.t_ds_sjzyxxsjx").load()
    val columnInfoDF = mysqlDataFrameReader.option("dbtable", s"$mysqlDBName.t_ds_sjzysjxxxsjx").load()

    val dataDescriptionList = new ListBuffer[DataDescription]
    val dataDescription: DataDescription = new DataDescription
    // resolve the table id, then the data-resource identifier (sjzybsf)
    val tableId = tableInfoDF.where($"tb_name" === tableName).first().getAs[String]("id")
    val resourceId = tableMoreInfoDF.where($"tb_id" === tableId).first().getAs[String]("sjzybsf")
    dataDescription.setDataResourceId(resourceId)

    // field identifier -> (english name, chinese name), all upper-cased;
    // nulls from the DB are normalized to ""
    val columnMeta = columnInfoDF.where($"tb_id" === tableId).collect().map { row =>
      def upper(col: String): String = Option(row.getAs[String](col)).getOrElse("").toUpperCase
      upper("sjxbsf") -> (upper("sjxywmc"), upper("sjxzwmc"))
    }.toMap

    // one DataItem per Hive column; unknown columns get empty names
    val dataItems = new ListBuffer[DataItem]
    fieldNames.map(_.toUpperCase).foreach { field =>
      val dataItem: DataItem = new DataItem
      val (ywmc, zwmc) = columnMeta.getOrElse(field, ("", ""))
      dataItem.setIdentifier(ywmc)
      dataItem.setSjxzwmc(zwmc)
      dataItems.append(dataItem)
    }
    dataDescription.setDataItems(dataItems.asJava)
    dataDescriptionList.append(dataDescription)
    dataDescriptionList
  }

  /**
    * Builds the index-file package head.
    *
    * @param recordNum   total records in this zip
    * @param fileNum     data files in this zip
    * @param zipIndex    zip sequence number (folded into messageSequence)
    * @param attachedNum attachment count (defaults to 0)
    */
  def getPackageHead(recordNum: Int, fileNum: Int, zipIndex: Int, attachedNum: Int = 0): PackageHead = {
    val packageHead = new PackageHead
    packageHead.setVersion("1.00")
    packageHead.setSendUnitCode(DATA_SENDER_LOGO)
    packageHead.setSendUnitName(CODE_NAME)
    packageHead.setRecordNum(recordNum)
    packageHead.setDataFileNum(fileNum)
    packageHead.setAttachedNum(attachedNum)
    // messageSequence = yyyyMMddHHmmss + zero-padded zip index
    packageHead.setMessageSequence(DateUtil.format(new Date, DatePattern.PURE_DATETIME_PATTERN) + "%05d".format(zipIndex))
    packageHead
  }

  /**
    * Writes the rows as UTF-8 lines to filePath/fileName, creating the
    * directory if needed.
    *
    * BUGFIX: the original swallowed every exception (despite the @throws
    * annotation) and leaked the stream on failure — a failed write produced a
    * missing/partial file that was still recorded in the index. Errors now
    * propagate (failing the Spark task) and the writer is always closed.
    */
  @throws[IOException]
  def writeDataFile(rows: ListBuffer[String], filePath: String, fileName: String): Unit = {
    val dir = new File(filePath)
    if (!dir.exists) {
      dir.mkdirs
    }

    val file = FileUtil.newFile(filePath + File.separator + fileName)
    val csvWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"))
    try {
      for (row <- rows) {
        csvWriter.write(row)
        csvWriter.newLine()
      }
    } finally {
      csvWriter.close()
    }
  }

  /**
    * Uploads the file to the FTP root directory.
    *
    * BUGFIX: the original swallowed upload failures (and the caller then
    * deleted the zip) — silent data loss with a success exit code. Failures
    * now propagate; the connection is always closed.
    */
  def uploadZip(url: String, port: Int, user: String, password: String, filePath: String): Unit = {
    val ftp = new Ftp(url, port, user, password)
    try {
      ftp.upload("/", FileUtil.file(filePath))
    } finally {
      ftp.close()
    }
  }

  /** Data-file name: A-<sender>-003-<pseudo-unique ts>-<00seq>-<table>_0.bcp */
  private def getFileName(nameNum: Int, tableName: String) =
    "A" + SYMBOL + DATA_SENDER_LOGO + SYMBOL + "003" + SYMBOL +
      (timestamp + (nameNum + Math.random()) * 100).toLong + SYMBOL +
      "%05d".format(nameNum) + SYMBOL + tableName + "_0.bcp"

  /** Zip name: <sender>-<receiver>-<pseudo-unique ts>-<00index>-<table>.zip */
  private def getZipName(zipIndex: Int, tableName: String = CUSTOM_LOGO) =
    DATA_SENDER_LOGO + SYMBOL + DATA_RECEIVE_LOGO + SYMBOL +
      (timestamp + (zipIndex + Math.random()) * 10000).toLong + SYMBOL +
      "%05d".format(zipIndex) + SYMBOL + tableName + ".zip"
}
