package com.kingsoft.dc.khaos.module.spark.sink

import java.net.URL
import java.text.SimpleDateFormat
import java.util
import java.util.concurrent.TimeUnit

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.meta.model.col.DmTableColumn
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.module.spark.constants.{ColumnType, InfluxConstants}
import com.kingsoft.dc.khaos.module.spark.metadata.sink.{ExtractFieldInfo, InfluxExtractFieldInfo, InfluxSinkConfig}
import com.kingsoft.dc.khaos.module.spark.model.center.metric.SyncProcessDataMetric
import com.kingsoft.dc.khaos.module.spark.util.DataframeUtils.getDataType
//import org.influxdb.InfluxDBFactory
import com.kingsoft.dc.khaos.module.spark.util.InfluxDBFactory

import com.kingsoft.dc.khaos.module.spark.util.InfluxUtils.{FieldTypeEnum, TsFormatEnum, TsTypeEnum}
import com.kingsoft.dc.khaos.module.spark.util.{CenterMetricUtils, DataframeUtils, MetaUtils}
import com.kingsoft.dc.khaos.util.Logging
import okhttp3.OkHttpClient
import org.apache.spark.sql.functions.{lit, trim}
import org.apache.spark.sql.types.{DataType, DataTypes, StringType}
import org.apache.spark.sql.{Column, DataFrame, Encoders, Row}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.LongAccumulator
import org.influxdb
import org.influxdb.InfluxDB.ConsistencyLevel
import org.influxdb.dto.{BatchPoints, Point}
import org.json4s.jackson.JsonMethods.{compact, render}
import org.json4s.{DefaultFormats, JsonAST}

import scala.collection.JavaConverters._
import scala.collection.immutable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}

/**
  * create by yansu on 2019/12/04 19:53
  */
class InfluxSink extends SinkStrategy with Logging with Serializable {

  // Sink configuration extracted from the module's JSON config; set in sink().
  private var _influxConfig: InfluxSinkConfig = null

  // Connection settings resolved from metadata in initMetaData().
  // NOTE(review): _host is assigned the FULL connection url, not only the host
  // part (see initMetaData); _port is parsed but never read — confirm intent.
  private var _host: String = null
  private var _port: String = null
  private var _username: String = null
  private var _password: String = null

  // OkHttp timeouts, applied with TimeUnit.SECONDS in initDBJavaConnect();
  // a 6000-second default is unusually large — presumably milliseconds were
  // intended, TODO confirm against the config keys in loadProperties().
  private var _write_timeout = 6000
  private var _read_timeout = 6000
  private var _connect_timeout = 6000
  // Partitioning knobs for the write stage; overridable via loadProperties().
  private var _repartition_per_nums = 80
  private var _max_partition_nums = 2000


  // Target table column metadata; @transient so it is not shipped to executors.
  @transient
  private var _influxColInfo: util.List[DmTableColumn] = null

//  private var _table_name_with_quote: String = null

  /**
    * Data output entry point: validates write authorization, initializes
    * connection metadata, loads tuning properties, then writes the frame.
    *
    * @param kc        khaos runtime context
    * @param module_id id of this module instance (unused in this method)
    * @param config    module JSON config, extracted into InfluxSinkConfig
    * @param schema    inner schema description (unused in this method)
    * @param dataSet   the data to write
    */
  override def sink(kc: KhaosContext,
                    module_id: String,
                    config: JsonAST.JObject,
                    schema: Schema,
                    dataSet: DataFrame): Any = {

    implicit val formats = DefaultFormats
    val influxConfig = config.extract[InfluxSinkConfig]
    this._influxConfig = influxConfig

    // fail fast if the job is not authorized to write this db/table
    MetaUtils.checkWriteAuth(kc,
      influxConfig.db_name,
      influxConfig.table_name,
      influxConfig.extender.get.auth.clazz,
      compact(render(influxConfig.extender.get.auth.params)))

    initMetaData(kc)
    loadProperties(kc)
    reslutWrite(kc, dataSet)
  }

  /**
    * Resolves connection info (url/user/password) and the target table's
    * column metadata from the metadata extender, storing them in fields.
    */
  def initMetaData(kc: KhaosContext) = {
    log.info("Initializes The Physical Address!")
    val className = _influxConfig.extender.get.meta.clazz

    val dbname = _influxConfig.db_name
    val tblname = _influxConfig.table_name

    val influxMeta = MetaUtils.getInfluxMeta(kc,
      dbname,
      tblname,
      className,
      compact(render(_influxConfig.extender.get.meta.params)),
      this)


    val influxConnect = influxMeta.getDsInfluxConnect
    _influxColInfo = influxMeta.getColumnEntiy
    val urlStr = influxConnect.getUrl
    // NOTE(review): despite its name, _host holds the FULL url string — the
    // connect call in initDBJavaConnect expects a url, so do not "fix" this
    // to analysisUrl(urlStr)._1 without also changing that call.
    _host = urlStr
    _port = analysisUrl(urlStr)._2 // parsed but not read anywhere else in this class
    _username = influxConnect.getUsername
    _password = influxConnect.getPassword

//    _table_name_with_quote =   processName(_influxConfig.table_name)

  }

  /**
    * Splits a connection URL into its host and port components.
    *
    * Fixed: URL.getPort returns -1 when the URL carries no explicit port,
    * which previously yielded the bogus port string "-1"; we now fall back
    * to the scheme's default port in that case.
    *
    * @param urlStr a full URL, e.g. "http://127.0.0.1:8086"
    * @return (host, port) with both components as strings
    */
  def analysisUrl(urlStr: String): (String, String) = {
    val url = new URL(urlStr)
    val host = url.getHost
    // prefer the explicit port; otherwise use the protocol default (80 for http)
    val port = if (url.getPort != -1) url.getPort else url.getDefaultPort
    (host, port.toString)
  }

  /**
    * Opens an InfluxDB connection bound to the configured database.
    *
    * The project's InfluxDBFactory accepts the OkHttp client Builder
    * directly (no .build() here), and _host holds the full connection url.
    *
    * NOTE(review): timeouts are applied in SECONDS; with the default of 6000
    * the effective timeout is ~100 minutes — presumably milliseconds were
    * intended, TODO confirm.
    */
  def initDBJavaConnect() = {
    val okclient = new OkHttpClient.Builder()
      .writeTimeout(_write_timeout, TimeUnit.SECONDS)
      .readTimeout(_read_timeout, TimeUnit.SECONDS)
      .connectTimeout(_connect_timeout, TimeUnit.SECONDS)
    val influxDB = InfluxDBFactory.connect(_host, _username, _password, okclient)
    val database: influxdb.InfluxDB = influxDB.setDatabase(_influxConfig.db_name)
    database
  }

  /**
    * Bulk insert via BatchPoints: one connection and one BatchPoints per
    * partition, then a count/metric report to the ops center.
    *
    * Fixed: the InfluxDB connection is now closed in a finally block, so a
    * failure while building points or writing the batch no longer leaks the
    * underlying HTTP client.
    */
  def reslutWrite(kc: KhaosContext, data: DataFrame) = {
    log.info("Start Write To Influxdb!")
    // set default values for unmapped / null columns
    var dataAfterProcess = setDefaultValue(_influxConfig.extract_fields, _influxColInfo, data)
    // convert column types to match the target schema
    dataAfterProcess = convertDataType(_influxConfig.extract_fields, dataAfterProcess)
    // total row count; persist because the frame is traversed again below
    val count_num: Long = dataAfterProcess.persist(StorageLevel.MEMORY_AND_DISK).count
    val (tmpData, accumulator): (DataFrame, LongAccumulator) = DataframeUtils.calculateDataNum(kc, dataAfterProcess, "InfluxDBSink")
    val repartition_num = DataframeUtils.estimateInfluxdbPartitions(kc, tmpData, _influxColInfo, count_num, _repartition_per_nums, _max_partition_nums)
    tmpData.repartition(repartition_num).foreachPartition(partition => {
      val database = initDBJavaConnect()
      try {
        val batchPoints = BatchPoints
          .database(_influxConfig.db_name)
          .consistency(ConsistencyLevel.ALL)
          .build()
        partition.foreach(row => {
          // write_option decides whether the timestamp comes from the data
          // (manual) or is assigned by the server at write time (automatic)
          if (_influxConfig.write_option.equalsIgnoreCase(TsTypeEnum.AUTOMATIC)) {
            batchPoints.point(automaticSink2influx(row).build())
          } else if (_influxConfig.write_option.equalsIgnoreCase(TsTypeEnum.MANUAL)) {
            batchPoints.point(manualSink2influx(row).build())
          } else {
            throw new IllegalArgumentException("Unknown TimeStamp Type! ")
          }
        })
        database.write(batchPoints)
      } finally {
        // always release the connection, even when point building or the write fails
        database.close()
      }
    }
    )
    println("数据总条数：" + count_num, "分区数：" + repartition_num, "累加器上报条数：" + accumulator.count)
    // report the processed-row metric to the ops center
    val metric: SyncProcessDataMetric = CenterMetricUtils.buildSyncProcessDataMetric(kc)
    metric.setProcessDataLValue(accumulator.count)
    CenterMetricUtils.reportSyncProcessData(metric, kc)
    dataAfterProcess.unpersist()
  }

  /**
    * TODO batch insert driven by the client's time/size batching
    * (enableBatch flushes every 5000 points or 1000 ms).
    *
    * Fixed: the original built a Point per row inside the loop but issued a
    * single write AFTER the loop, so only the last row of each partition was
    * ever written. Each row is now handed to the batching client, and the
    * client is closed in a finally block (close() flushes pending points).
    */
  def reslutBatchWrite(kc: KhaosContext, data: DataFrame) = {
    log.info("Start Write To Influxdb!")
    // set default values for unmapped / null columns
    var dataAfterProcess = setDefaultValue(_influxConfig.extract_fields, _influxColInfo, data)
    // convert column types to match the target schema
    dataAfterProcess = convertDataType(_influxConfig.extract_fields, dataAfterProcess)
    // NOTE(review): partition count comes from the hard-coded "renum" config key
    // rather than being estimated from the data size — confirm this is intended.
    var repartitions_num: Int = kc.conf.getString("renum").toInt
    val (tmpData, accumulator): (DataFrame, LongAccumulator) = DataframeUtils.calculateDataNum(kc, dataAfterProcess, "InfluxDBSink")
    tmpData.repartition(repartitions_num).foreachPartition(partition => {
      val client = initDBJavaConnect()
        .setDatabase(_influxConfig.db_name)
        .setConsistency(ConsistencyLevel.ALL)
        .enableBatch(5000, 1000, TimeUnit.MILLISECONDS)
      try {
        partition.foreach(row => {
          // write_option decides whether the timestamp comes from the data
          // (manual) or is assigned by the server at write time (automatic)
          val point: Point.Builder =
            if (_influxConfig.write_option.equalsIgnoreCase(TsTypeEnum.AUTOMATIC)) {
              automaticSink2influx(row)
            } else if (_influxConfig.write_option.equalsIgnoreCase(TsTypeEnum.MANUAL)) {
              manualSink2influx(row)
            } else {
              throw new IllegalArgumentException("Unknown TimeStamp Type! ")
            }
          // BUG FIX: write every row; the batching client coalesces the requests
          client.write(point.build())
        })
      } finally {
        // close() disables batching and flushes any buffered points
        client.close()
      }
    }
    )
    // report the processed-row metric to the ops center
    val metric: SyncProcessDataMetric = CenterMetricUtils.buildSyncProcessDataMetric(kc)
    metric.setProcessDataLValue(accumulator.count)
    CenterMetricUtils.reportSyncProcessData(metric, kc)
  }

  //    val (tmpData, accumulator): (DataFrame, LongAccumulator) = DataframeUtils.calculateDataNum(kc, dataAfterProcess, "InfluxDBSink")
  //    val influxdb = initDBJavaConnect()
  //      .setDatabase(_influxConfig.db_name)
  //      .setConsistency(ConsistencyLevel.ALL)
  //      .enableBatch(2000, 1000, TimeUnit.MILLISECONDS)
  //    var point: Point.Builder = Point.measurement(_influxConfig.table_name)
  //    for (i <- 1 to count_num.toInt) {
  //      tmpData.repartition(10).foreachPartition(partition => {
  //        partition.foreach(row => {
  //          //manual/automatic
  //          if (_influxConfig.write_option.equalsIgnoreCase(TsTypeEnum.AUTOMATIC)) {
  //            point = automaticSink2influx(row)
  //          } else if (_influxConfig.write_option.equalsIgnoreCase(TsTypeEnum.MANUAL)) {
  //            point = manualSink2influx(row)
  //          } else {
  //            throw new IllegalArgumentException("Unknown TimeStamp Type! ")
  //          }
  //        })
  //        point
  //      }
  //      )
  //      influxdb.write(point.build())
  //    }

  /**
    * Loads influx sink tuning parameters (timeouts, partitioning) from the
    * "module.influxdb.sink." configuration prefix, keeping the built-in
    * defaults for any key that is absent.
    *
    * @param kc khaos context supplying the configuration
    */
  def loadProperties(kc: KhaosContext): Unit = {
    val props: Map[String, String] = kc.conf.getAllWithPrefix("module.influxdb.sink.").toMap
    log.info("InfluxSink Properties")
    for ((key, value) <- props) log.info(key + "   " + value)
    // small helper: look up a key and parse it, falling back to the default
    def intOf(key: String, fallback: String): Int = props.getOrElse(key, fallback).toInt
    _write_timeout = intOf(InfluxConstants.MODULE_INFLUXDB_SINK_WRITE_TIMEOUT, "6000")
    _read_timeout = intOf(InfluxConstants.MODULE_INFLUXDB_SINK_READ_TIMEOUT, "6000")
    _connect_timeout = intOf(InfluxConstants.MODULE_INFLUXDB_SINK_CONNECT_TIMEOUT, "6000")
    _repartition_per_nums = intOf(InfluxConstants.MODULE_INFLUXDB_SINK_REPARTITION_PER_NUMS, "80")
    _max_partition_nums = intOf(InfluxConstants.MODULE_INFLUXDB_SINK_MAX_PARTITION_NUMS, "2000")
  }


  /**
    * Builds a Point whose timestamp will be assigned automatically by the
    * InfluxDB server: TIMESTAMP columns are skipped, TAG columns become
    * tags, FIELD columns become values typed by their declared column type.
    * Null cells and empty strings are dropped from the point.
    *
    * Fixed: numeric fields were read with getAs[Long]/getAs[Double] before
    * any null check, which NPEs on unboxing when the cell is null (the old
    * `value != ""` test on a Long was also vacuously true); null cells are
    * now detected with Row.isNullAt before extraction.
    *
    * @param series one row of the prepared DataFrame
    * @return the point builder (timestamp deliberately not set)
    * @throws IllegalArgumentException on an unknown field type, or when no
    *                                  FIELD column is mapped from upstream
    */
  def automaticSink2influx(series: Row): Point.Builder = {
    var point: Point.Builder = Point.measurement(_influxConfig.table_name)

    var fieldNum: Int = 0
    for (elem <- _influxConfig.extract_fields) {
      if (elem.field_type.equalsIgnoreCase(FieldTypeEnum.TIMESTAMP)) {
        // the server assigns the timestamp in automatic mode — nothing to do
      } else if (elem.field_type.equalsIgnoreCase(FieldTypeEnum.TAG)) {
        if (!series.isNullAt(series.fieldIndex(elem.field))) {
          val value = series.getAs[String](elem.field)
          if (value != "") {
            point = point.tag(elem.field, value)
          }
        }
      } else if (elem.field_type.equalsIgnoreCase(FieldTypeEnum.FIELD)) {
        // a point must carry at least one field mapped from upstream
        if (!elem.from_field.equalsIgnoreCase("")) {
          fieldNum = fieldNum + 1
        }
        if (!series.isNullAt(series.fieldIndex(elem.field))) {
          elem.data_type match {
            case ColumnType.NUMBER =>
              // safe to unbox: the null case was excluded above
              point = point.addField(elem.field, series.getAs[Long](elem.field))
            case ColumnType.DECIMAL =>
              point = point.addField(elem.field, series.getAs[Double](elem.field))
            case _ =>
              // STRING and every other declared type is written as a string
              val value = series.getAs[String](elem.field)
              if (value != "") {
                point = point.addField(elem.field, value)
              }
          }
        }
      } else throw new IllegalArgumentException("Unknown field type")
    }

    if (fieldNum == 0) {
      throw new IllegalArgumentException("目标表至少应有一个field字段")
    }
    point
  }

  /**
    * Builds a Point whose timestamp is taken from the row's TIMESTAMP field.
    *
    * First pass over the schema resolves and sets the timestamp; second pass
    * adds tags and fields. Null or empty tag/string values are skipped.
    *
    * @param series one row of the prepared DataFrame
    * @return the point builder with its timestamp set
    */
  def manualSink2influx(series: Row): Point.Builder = {
    var point: Point.Builder = Point.measurement(_influxConfig.table_name)
    for (elem <- _influxConfig.extract_fields) {
      var timeStampLong = 0l
      val fn = elem.field
      val dt = elem.data_type
      if (elem.field_type.equalsIgnoreCase(FieldTypeEnum.TIMESTAMP)) {
        dt match {
          case "Long" => {
            // declared Long: several string layouts accepted via convertTimeStampType
            val ifHaveTimeStampStr = series.getAs[String](fn).trim
            timeStampLong = convertTimeStampType(ifHaveTimeStampStr)
          }
          case _ => {
            // any other declared type: parse as a plain datetime string
            val ifHaveTimeStampLong = series.getAs[String](fn)
            val sdf = new SimpleDateFormat(TsFormatEnum.DATE_TIME)
            timeStampLong = sdf.parse(ifHaveTimeStampLong).getTime
          }
        }
        // Original note: "if the timestamp is empty, use the DB write time".
        // NOTE(review): convertTimeStampType returns -1 for an empty input and
        // that -1 is written as the point time here, NOT replaced by the
        // server's write time — confirm the intended behavior.
        point = point.time(timeStampLong, TimeUnit.MILLISECONDS)
      }
    }
    for (elem <- _influxConfig.extract_fields) {
      val fn = elem.field
      val dt = elem.data_type
      val ft = elem.field_type
      if (ft.equalsIgnoreCase(FieldTypeEnum.TAG)) {
        val value = series.getAs[String](fn)
        if (value != "" && value != null) {
          point = point.tag(fn, value)
        }
      } else if (ft.equalsIgnoreCase(FieldTypeEnum.FIELD)) {
        dt match {
          case ColumnType.STRING => {
            val value: String = series.getAs[String](fn)
            if (value != "" && value != null) {
              point = point.addField(fn, value)
            }
          }
          case ColumnType.NUMBER => {
            // NOTE(review): getAs[Long] unboxes and will NPE on a null cell,
            // so the null check below can never trigger — confirm upstream
            // guarantees non-null numeric values (see setDefaultValue).
            val value: Long = series.getAs[Long](fn)
            if (value != null) {
              point = point.addField(fn, value)
            }
          }
          case ColumnType.DECIMAL => {
            // NOTE(review): same unboxing caveat as NUMBER above.
            val value: Double = series.getAs[Double](fn)
            if (value != null) {
              point = point.addField(fn, value)
            }
          }
          case _ => {
            // every other declared type is written as a string
            val value = series.getAs[String](fn)
            if (value != "" && value != null) {
              point = point.addField(fn, value)
            }
          }
        }
      }
    }
    point
  }

  /**
    * Returns the given builder, or a fresh builder for the target
    * measurement when the argument is null.
    */
  def JudgePointWhetherNull(point: Point.Builder): Point.Builder =
    if (point == null) Point.measurement(_influxConfig.table_name) else point

  /**
    * Converts a timestamp string in any of the supported layouts
    * (datetime with 'T', datetime with space, date, time, or a bare epoch
    * number) to epoch milliseconds. Returns -1 for a null or empty input.
    */
  def convertTimeStampType(ifHaveTimeStampStr: String): Long = {
    if (ifHaveTimeStampStr == null || ifHaveTimeStampStr == "") {
      -1L
    } else if (ifHaveTimeStampStr.contains("T")) {
      // datetime with a 'T' separator
      new SimpleDateFormat(TsFormatEnum.DATE_TIME_WITH_T).parse(ifHaveTimeStampStr).getTime
    } else if (ifHaveTimeStampStr.contains(" ")) {
      // datetime with a space separator
      new SimpleDateFormat(TsFormatEnum.DATE_TIME).parse(ifHaveTimeStampStr).getTime
    } else if (ifHaveTimeStampStr.contains("-")) {
      // date only ('T' and ' ' were ruled out by the branches above)
      new SimpleDateFormat(TsFormatEnum.DATE).parse(ifHaveTimeStampStr).getTime
    } else if (ifHaveTimeStampStr.contains(":")) {
      // time only
      new SimpleDateFormat(TsFormatEnum.TIME).parse(ifHaveTimeStampStr).getTime
    } else {
      // bare epoch value
      ifHaveTimeStampStr.toLong
    }
  }

  /**
    * Projects the upstream DataFrame onto the sink schema, casts every
    * column to String, fills configured default values and validates
    * NOT NULL columns.
    *
    * @param sinkSchema  target field definitions from the sink config
    * @param columnEntiy target table column metadata
    * @param data        upstream data
    * @return a DataFrame with one String column per sink field
    * @throws Exception when no field is mapped and all defaults are empty,
    *                   or a NOT NULL unmapped column has no default
    */
  def setDefaultValue(sinkSchema: List[InfluxExtractFieldInfo],
                      columnEntiy: util.List[DmTableColumn],
                      data: DataFrame): DataFrame = {
    // build a map of [column name -> not_null flag ("true"/"false")]
    val fieldAndNotNull: Map[String, String] = columnEntiy.asScala.map(colEntiy => {
      val colName: String = colEntiy.getColName
      var not_null: String = ""
      colEntiy.getParams.asScala.foreach(map => {
        map.get("pKey") match {
          case "NOT_NULL" => not_null = map.get("pValue")
          case _ =>
        }
      })
      (colName, not_null)
    }).toMap
    // keep only the columns that are actually mapped from an upstream field
    val colArr = new ArrayBuffer[Column]()
    for (ef <- sinkSchema) {
      if (!ef.from_field.trim.equals("")) {
        val to_field: String = ef.field
        val from_field: String = ef.from_field
        colArr += data.col(from_field) as to_field
      }
    }
    // 20200526 fail when nothing is mapped AND every default value is empty
    if (colArr.isEmpty) {
      var isError = true
      sinkSchema.foreach(schema => {
        if (!"".equals(schema.field_props.default_value)) {
          isError = false
        }
      })
      if (isError) {
        throw new Exception("作业配置异常,目标表没有连接上游字段,且默认值都为空!")
      }
    }

    var value: DataFrame = data.select(colArr: _*)

    // begin: normalize every target column to a String column
    val cols = sinkSchema.map(ef => {
      var column: Column = null
      val to_field: String = ef.field
      val data_type = ef.data_type
      val from_field: String = ef.from_field
      val default_value: String = ef.field_props.default_value

      if (!from_field.trim.equals("")) { // mapped from upstream
        // trim TIME values, otherwise the downstream write fails
        if (data_type.equalsIgnoreCase("TIME")) {
          column = trim(value.col(to_field).cast(StringType)) as to_field
        } else {
          column = value.col(to_field).cast(StringType)
        }

      } else { // not mapped: null placeholder, filled with a default below
        // Per the reference implementation there is no SERIAL check here,
        // hence it stays commented out:
        //        if (!data_type.equalsIgnoreCase("SERIAL4") && !data_type.equalsIgnoreCase("SERIAL8")) {
        //          column = lit(null).cast(StringType).as(to_field)
        //        }

        column = lit(null).cast(StringType).as(to_field)

      }
      column
    })

    value = value.select(cols: _*)

    // end

    // fill defaults and validate NOT NULL (columns are already Strings here)
    for (ef <- sinkSchema) {
      val to_field: String = ef.field
      val data_type = ef.data_type
      val from_field: String = ef.from_field
      val default_value: String = ef.field_props.default_value

      // superseded by the select above; kept for reference, to be removed
      //      if (!from_field.trim.equals("")) {
      //        //将time类型的值去掉空格，否则写入mysql会报错
      //        if (data_type.equalsIgnoreCase("TIME")) {
      //          value = value.withColumn(to_field, trim(value.col(to_field).cast(StringType)))
      //        } else {
      //          value = value.withColumn(to_field, value.col(to_field).cast(StringType))
      //        }
      //      } else {
      //        value = value.withColumn(to_field, lit(null).cast(StringType))
      //      }
      // fill the configured default value
      // NOTE(review): fieldAndNotNull.apply throws NoSuchElementException if
      // the sink field is missing from the table metadata — confirm the
      // metadata always covers every configured field.
      if (!default_value.equals("")) {
        value = value.na.fill(default_value, Array.apply(to_field))
      } else if (default_value.equals("") && fieldAndNotNull.apply(to_field).equalsIgnoreCase("true") && from_field.trim.equals("")) {
        log.error(s"目标字段：${to_field}不能为null!")
        throw new Exception(s"目标字段：${to_field}不能为null!")
      }
    }
    value
  }

  /**
    * Casts every extracted field to the Spark type matching its declared
    * column type and projects the frame down to exactly those columns.
    */
  def convertDataType(sinkSchema: List[InfluxExtractFieldInfo], data: DataFrame): DataFrame = {
    val casted: immutable.Seq[Column] =
      for (ef <- sinkSchema)
        yield data.col(ef.field).cast(getDataType(ef.data_type))
    data.select(casted: _*)
  }

  /**
    * Maps a logical column type name to the Spark SQL type used for the
    * pre-write cast. Only NUMBER and DECIMAL get numeric types; every other
    * name (STRING, DATE, TIME, DATETIME, "Long", unknown) maps to String.
    */
  def getDataType(dataType: String): DataType = dataType match {
    case ColumnType.NUMBER  => DataTypes.LongType
    case ColumnType.DECIMAL => DataTypes.DoubleType
    case _                  => DataTypes.StringType
  }

  /** Wraps the given name in double quotes (identifier quoting). */
  def processName(str: String): String = "\"" + str + "\""
}
