package com.kingsoft.dc.khaos.module.spark.source

import java.net.URL
import java.util
import java.util.concurrent.TimeUnit

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.meta.model.col.DmTableColumn
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.constants.{ColumnType, InfluxConstants}
import com.kingsoft.dc.khaos.module.spark.metadata.source.{InfluxExtractFieldInfo, InfluxSourceConfig}
import com.kingsoft.dc.khaos.module.spark.util.MetaUtils
import com.kingsoft.dc.khaos.util.Logging
import okhttp3.OkHttpClient
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.sql.functions.{col, split}
import org.influxdb
//import org.influxdb.InfluxDBFactory
import com.kingsoft.dc.khaos.module.spark.util.InfluxDBFactory
import org.influxdb.dto.Query
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.collection.mutable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}

/**
  * create by yansu on 2019/12/04 16:02
  */
/**
  * Spark source strategy that reads a table out of InfluxDB over its HTTP API
  * and materializes the query result as a DataFrame.
  */
class InfluxSource extends SourceStrategy with Logging with Serializable {
  // Parsed module configuration (db/table names, filter, extract fields, extenders).
  private var _influxConfig: InfluxSourceConfig = null
  // Khaos runtime context; supplies the SparkSession and configuration.
  private var _kc: KhaosContext = null

  // NOTE(review): despite the name, _host is assigned the FULL connection URL in
  // initMetaData (e.g. "http://host:port") — that is what InfluxDBFactory.connect expects.
  private var _host: String = null
  // Port parsed from the URL; set in initMetaData but not read afterwards.
  private var _port: String = null
  private var _username: String = null
  private var _password: String = null
  // Column metadata of the source table, fetched from the meta extender.
  private var columnsInfo: util.List[DmTableColumn] = null

  // Table name wrapped in double quotes so InfluxQL treats it as an identifier.
  private var _tableNameWithQuote: String = null


  // Timeouts handed to the OkHttp client; overridden by "module.influxdb.source.*"
  // properties in loadProperties.
  // NOTE(review): initDBJavaConnect applies these with TimeUnit.SECONDS, so the
  // defaults mean 60000/6000 SECONDS — confirm whether milliseconds were intended.
  private var _write_timeout = 60000
  private var _read_timeout = 6000
  private var _connect_timeout = 6000
  // Page size for limit/offset pagination when the table has more rows than this.
  private var _offset_num = 100000

  /**
    * Module entry point: parses the JSON config, verifies read authorization,
    * resolves metadata and the InfluxDB connection, then loads the table into
    * a DataFrame.
    */
  override def source(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependence: Dependency): DataFrame = {
    implicit val formats = DefaultFormats
    this._influxConfig = parse(config, true).extract[InfluxSourceConfig]
    this._kc = kc

    // Fail fast if the job may not read this database/table.
    MetaUtils.checkReadAuth(kc,
      _influxConfig.db_name,
      _influxConfig.table_name,
      _influxConfig.extender.auth.clazz,
      compact(render(_influxConfig.extender.auth.params)))

    initMetaData()
    loadProperties(kc)
    queryResult(initDBJavaConnect())
  }

  /**
    * Fetches connection details and column metadata for the configured
    * database/table from the meta extender and caches them in instance fields
    * for the query phase.
    */
  def initMetaData() = {
    log.info("Initialize physical address!")
    val meta = MetaUtils.getInfluxMeta(_kc,
      _influxConfig.db_name,
      _influxConfig.table_name,
      _influxConfig.extender.meta.clazz,
      compact(render(_influxConfig.extender.meta.params)),
      this)

    val connect = meta.getDsInfluxConnect
    columnsInfo = meta.getColumnEntiy()

    // _host deliberately keeps the whole URL — InfluxDBFactory.connect takes a
    // URL string, not a bare hostname.
    _host = connect.getUrl
    _port = analysisUrl(connect.getUrl)._2
    _username = connect.getUsername
    _password = connect.getPassword
    _tableNameWithQuote = processName(_influxConfig.table_name)
  }

  /**
    * Splits a connection URL into its host and port components.
    *
    * @param urlStr a full URL, e.g. "http://127.0.0.1:8086"
    * @return (host, port) as strings; when the URL carries no explicit port the
    *         protocol's default port is returned instead of "-1"
    */
  def analysisUrl(urlStr: String): (String, String) = {
    val url = new URL(urlStr)
    // URL.getPort yields -1 when the URL has no explicit port; fall back to the
    // protocol default (80 for http, 443 for https) so callers never see "-1".
    val port = if (url.getPort >= 0) url.getPort else url.getDefaultPort
    (url.getHost, port.toString)
  }


  /**
    * Builds the InfluxDB client with the configured timeouts and binds it to
    * the configured database.
    *
    * @return an influxdb.InfluxDB handle with the target database selected
    */
  def initDBJavaConnect() = {
    log.info("Initialize The InfluxDB Connection!")
    // NOTE(review): the timeout values default to 6000/60000 (see loadProperties)
    // and are applied here as SECONDS — i.e. hours-long timeouts. Confirm whether
    // TimeUnit.MILLISECONDS was intended before relying on these settings.
    val okclient = new OkHttpClient.Builder()
      .writeTimeout(_write_timeout, TimeUnit.SECONDS)
      .readTimeout(_read_timeout, TimeUnit.SECONDS)
      .connectTimeout(_connect_timeout, TimeUnit.SECONDS)
    // Project-local InfluxDBFactory (see import swap at the top of the file)
    // accepts the OkHttp builder directly; _host carries the full URL.
    val influxDB = InfluxDBFactory.connect(_host, _username, _password, okclient)
    val database: influxdb.InfluxDB = influxDB.setDatabase(_influxConfig.db_name)
    database
  }

  /**
    * Reads the whole table (honoring the optional filter) into a DataFrame.
    * When the row count exceeds the configured page size (_offset_num), the
    * table is fetched with limit/offset pagination and the pages are unioned.
    *
    * @param database InfluxDB handle already bound to the target database
    * @return the table content, cast to the configured extract-field types
    */
  def queryResult(database: influxdb.InfluxDB): DataFrame = {
    log.info("start query data!")
    val query_count: Double = queryFirst(database)
    var res_df: DataFrame = null

    if (_offset_num >= query_count) {
      // Fits in a single page: one unrestricted select.
      res_df = getDataFrame(buildSelect(""), database)
    } else {
      // Larger than one page: walk the table with limit/offset until the
      // counted total has been fetched.
      var fetched: Double = 0
      while (fetched < query_count) {
        val limitClause =
          if (fetched == 0) s"limit ${_offset_num} "
          else s"limit ${_offset_num} offset ${fetched.toInt}"
        val page = getDataFrame(buildSelect(limitClause), database)
        // First page starts the result; later pages are unioned in front of it,
        // matching the original accumulation order.
        res_df = if (res_df == null) page else page.union(res_df)
        fetched += _offset_num
      }
    }
    convertDataType(_influxConfig.extract_fields, res_df)
  }

  /** Builds "select * from <table> [where <filter>][ <limitClause>]". */
  private def buildSelect(limitClause: String): String = {
    val base =
      if (_influxConfig.filter != null && _influxConfig.filter != "")
        s"select * from ${_tableNameWithQuote} where ${_influxConfig.filter}"
      else
        s"select * from ${_tableNameWithQuote}"
    if (limitClause.isEmpty) base else s"$base $limitClause"
  }

  /**
    * Counts the table's rows by running "select count(<field>)" on the first
    * FIELD-typed column and returning the largest count in the result row.
    * Returns 0.0 when the series is empty or absent.
    */
  def queryFirst(database: influxdb.InfluxDB) = {
    import scala.collection.JavaConverters._
    // Map of column type -> column name derived from the table metadata.
    // NOTE(review): keying by TYPE means columns of the same type overwrite each
    // other, so at most one FIELD column survives. That suffices for the single
    // count() below, but verify the collapse is intentional.
    val fieldAndType = columnsInfo.asScala.map(colEntiy => {
//      val colName: String = colEntiy.getColName
//      val colName: String = if(colEntiy.getColName.equalsIgnoreCase("time")) colEntiy.getColName else processName(colEntiy.getColName)
      val colName: String = if(colEntiy.getColName.equalsIgnoreCase("time")) colEntiy.getColName.toLowerCase() else colEntiy.getColName
//      log.info("2 colName:{}", colName)
      var field_type = ""
      // Each param entry is a pKey/pValue pair; "TYPE" carries the column kind.
      colEntiy.getParams.asScala.foreach(map => {
        map.get("pKey") match {
          case "TYPE" => field_type = map.get("pValue")
          case _ =>
        }
      })
      (field_type, colName)
    }).toMap
    // Collect the names of FIELD-typed columns (count() only works on fields).
    val listBuffer = new ListBuffer[String]()
    for (elem <- fieldAndType) {
      if (elem._1.toLowerCase.equals("field")) {
        listBuffer += elem._2
      }
    }
    var querSQL = ""
    if (_influxConfig.filter != "" && _influxConfig.filter != null) {
      querSQL = s"select count(${listBuffer.head.toString.trim.toLowerCase}) from ${_tableNameWithQuote} where ${_influxConfig.filter}"
    } else {
      querSQL = s"select count(${listBuffer.head.toString.trim.toLowerCase}) from ${_tableNameWithQuote}"
    }
    val query = new Query(querSQL, _influxConfig.db_name)
    val queryResult = database.query(query).getResults
    // TODO: handle the empty-table read bug here (series can be null/empty)

    var max_count: Double = 0.0

//    log.info("xixi : {}", queryResult.toArray.toString)
    val queryResultOne = queryResult.get(0)
    val series = queryResultOne.getSeries
    // Only parse counts when the query actually produced a series.
    if(series!=null && !queryResultOne.getSeries.isEmpty) {
      val seriesOne = queryResultOne.getSeries.get(0)
      val valuesOne = seriesOne.getValues.get(0)
      // The row is parsed out of its toString form, e.g. "[<time>, <c1>, <c2>]";
      // index 0 is the timestamp and is skipped below.
      val count_num: String = valuesOne.toString
//      val count_num: String = queryResult.get(0).getSeries.get(0).getValues.get(0).toString
    val count_double = ListBuffer[Double]()
    val count_num_list = count_num.substring(1, count_num.length - 1).split(", ").toList
    for (i <- 1 to count_num_list.size - 1) {
      count_double += count_num_list(i).toDouble
    }
    if (count_double.isEmpty) {
      throw new IllegalArgumentException("表缺少Field字段!")
    }
    // Keep the largest count among the counted columns.
    for (elem <- count_double) {
      max_count = math.max(elem, max_count)
    }
    }

    max_count
  }

  /**
    * load config properties 配置
    *
    * @param kc
    */
  /**
    * Reads the "module.influxdb.source.*" properties from the Khaos
    * configuration and overrides the timeout and page-size defaults.
    *
    * @param kc Khaos context whose configuration is consulted
    */
  def loadProperties(kc: KhaosContext): Unit = {
    val props: Map[String, String] = kc.conf.getAllWithPrefix("module.influxdb.source.").toMap
    log.info("InfluxSource Properties")
    for ((key, value) <- props) log.info(key + "   " + value)
    _write_timeout = props.getOrElse(InfluxConstants.MODULE_INFLUXDB_SOURCE_WRITE_TIMEOUT, "6000").toInt
    _read_timeout = props.getOrElse(InfluxConstants.MODULE_INFLUXDB_SOURCE_READ_TIMEOUT, "6000").toInt
    _connect_timeout = props.getOrElse(InfluxConstants.MODULE_INFLUXDB_SOURCE_CONNECT_TIMEOUT, "6000").toInt
    _offset_num = props.getOrElse(InfluxConstants.MODULE_INFLUXDB_SOURCE_DATA_NUMS, "100000").toInt
  }

  /**
    * Casts every configured extract field of the DataFrame to the Spark SQL
    * type mapped from its declared data type.
    *
    * @param sinkSchema per-field name and data-type declarations
    * @param data       the raw (all-string) DataFrame built from the query result
    * @return the DataFrame with each listed column cast to its target type
    */
  def convertDataType(sinkSchema: List[InfluxExtractFieldInfo], data: DataFrame): DataFrame = {
    log.info("Start Convert DataType!")
    sinkSchema.foldLeft(data) { (df, ef) =>
      // "time" is the only column name that is lower-cased before the lookup.
      val field = if (ef.field.equalsIgnoreCase("time")) ef.field.toLowerCase() else ef.field
      df.withColumn(field, df.col(field).cast(getDataType(ef.data_type)))
    }
  }

  /**
    * Runs the given InfluxQL query and converts its (stringified) result rows
    * into a DataFrame whose columns follow the series column names.
    *
    * @param querySQL full InfluxQL select statement
    * @param database InfluxDB handle bound to the target database
    * @return a DataFrame of string columns, one per series column
    */
  def getDataFrame(querySQL: String, database: influxdb.InfluxDB) = {
    log.info("Start Structure DataFrame!")
    val results = database.query(new Query(querySQL, _influxConfig.db_name)).getResults
    val series = results.get(0).getSeries.get(0)

    // The Java lists are rendered via toString ("[[a, b], [c, d]]") and parsed
    // back textually: strip the outer brackets, then split row by row.
    val valuesText = series.getValues.toString
    val rowsText = valuesText.substring(2, valuesText.size - 2)

    val columnsText = series.getColumns.toString
    val headerText = columnsText.substring(1, columnsText.size - 1)

    val rows: mutable.Buffer[String] = rowsText.split("\\]\\,\\ \\[").toBuffer
    val spark = _kc.sparkSession
    import spark.implicits._

    var frame = rows.toDF()
    frame = frame.withColumn("_arr_column_", split(frame.col("value"), "\\,\\ "))

    // One projection per column name, pulled positionally from the split array.
    val projections: List[Column] =
      headerText.split("\\,\\ ", -1).toList.zipWithIndex.map {
        case (name, idx) => col("_arr_column_").getItem(idx).as(name)
      }
    frame = frame.select(projections: _*)
    log.info("Succeed Create DataFrame!")
    frame
  }

  /**
    * Maps a Khaos column data-type label to the corresponding Spark SQL type.
    * Anything unrecognized falls back to StringType.
    */
  def getDataType(dataType: String): DataType = dataType match {
    case ColumnType.NUMBER   => DataTypes.LongType
    case ColumnType.DATE     => DataTypes.DateType
    case ColumnType.DECIMAL  => DataTypes.DoubleType
    case ColumnType.DATETIME => DataTypes.TimestampType
    // STRING, TIME, "Long" and unknown labels all map to StringType.
    case _ => DataTypes.StringType
  }

  /** 获取上游的Schema */
  /**
    * Derives the upstream schema from the configured extract fields.
    *
    * @return one KhaosStructField per configured extract field, in order
    */
  override def schema(kc: KhaosContext,
                      config: String,
                      dependence: Dependency): List[KhaosStructField] = {
    implicit val formats = DefaultFormats
    val influxInfo = parse(config, true).extract[InfluxSourceConfig]
    influxInfo.extract_fields.map(ef => KhaosStructField(ef.field, ef.data_type))
  }

  /** Wraps a name in double quotes so InfluxQL treats it as an identifier. */
  def processName(str: String): String = "\"" + str + "\""
}
