package com.kingsoft.dc.khaos.module.spark.sink

import java.util
import java.util.Properties

import com.alibaba.fastjson.JSON
import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.meta.model.col.DmTableColumn
import com.kingsoft.dc.khaos.extender.meta.model.ds.RedisConnect
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.module.spark.constants.{ColumnType, MetaDataConstants, RedisConstants, SchedulerConstants}
import com.kingsoft.dc.khaos.module.spark.metadata.sink.{ExtractFieldInfo, RedisKeyMode, RedisSinkConfig, RedisValueMode}
import com.kingsoft.dc.khaos.module.spark.model.MetaDataEntity
import com.kingsoft.dc.khaos.module.spark.model.center.metric.SyncProcessDataMetric
import com.kingsoft.dc.khaos.module.spark.util.{CenterMetricUtils, DataframeUtils, FileConfigReader, MetaUtils, RedisUtils}
import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.sql
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Column, DataFrame, SparkSession}
import org.json4s.{DefaultFormats, JsonAST}
import redis.clients.jedis.{Jedis, JedisCluster}
import org.apache.spark.sql.functions._
import org.apache.spark.util.LongAccumulator

import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
import scala.util.Random


/**
  * Created by haorenhui on 2019/07/02.
  */
class RedisSink extends SinkStrategy with Logging with Serializable {

    //json配置
    private var redisConfig: RedisSinkConfig = _
    private var sparkSession: SparkSession = _
    private var TTL: Int = 0
    //key 分隔符
    private var key_delimiter: String = _
    //key 拼接的字段
    private var key_index: List[(String,String)] = Nil
    private var pre_key: String = ""
    //枚举 string,list,set,hash,zset
    private var value_type: String = _
    //枚举 set,hset,lpush,rpush
    private var value_mode: String = _
    //枚举 standard(标准json模式),value2key
    private var write_mode: String = _
    //value2key模式时的 value分隔符
    private var value_delimiter: String = _
    private var temp_column_name: String = "temp_column"
    private var columnInfoMetaList: List[ExtractFieldInfo] = Nil
    private var dmTableColumnList: util.List[DmTableColumn] = _

    //元数据
    private var redisNodes: String = _
    private var redisPassword: String = _
    private var masterName: String = _
    private var connectType: String = _

    val NA_FILL_NUMERIC = 0
    val NA_FILL_STRING:String = MetaDataConstants.NULL

    //单次push条数
    private var OnceMaxWriteSize: Long = 100000
    //redisProperties
    private var redisProperties: Map[String, String] = Map[String, String]()



    /**
      * Sink entry point: prepares the DataFrame (defaults, type conversion,
      * column ordering, NA replacement), serialises each row according to the
      * configured write mode and writes the result to Redis.
      */
    override def sink(kc: KhaosContext,
                      module_id: String,
                      config: JsonAST.JObject,
                      schema: Schema,
                      dataSet: DataFrame): Any = {
        init(kc, config)
        // prepare the frame: defaults -> type conversion -> column order -> NA fill
        val prepared: DataFrame = {
            val withDefaults = setDefaultValue(dataSet)
            val converted = DataframeUtils.convertDataType(columnInfoMetaList, withDefaults)
            val ordered = DataframeUtils.sortDataCol(converted, columnInfoMetaList)
            replaceNa(ordered)
        }
        // choose the value serialisation for the configured write mode
        val serialised: DataFrame = write_mode match {
            case RedisConstants.RedisValueEnum.standard    => convertDataFrame2Json(prepared)
            case RedisConstants.RedisValueEnum.value2key   => convertDataFrame2KV(prepared)
            case RedisConstants.RedisValueEnum.uniqueValue => convertDataFrame2Json(prepared)
            case _ => throw new Exception(s"redis write fail, 不支持的value写入模式$write_mode")
        }
        doWrite(serialised, kc)
    }

    /**
      * Parses the sink configuration into fields: extract-field metadata, key
      * construction rules (delimiter, key fields, TTL), value serialisation
      * settings, then resolves runtime properties and connection metadata.
      *
      * @throws Exception when a configured key field does not exist in the
      *                   extract-field list
      */
    def init(kc: KhaosContext, config: JsonAST.JObject): Unit = {
        import scala.collection.JavaConverters._
        implicit val formats:DefaultFormats = DefaultFormats
        redisConfig = config.extract[RedisSinkConfig]
        sparkSession = kc.sparkSession
        columnInfoMetaList = redisConfig.extract_fields
        // mirror the extract-field metadata as DmTableColumn (used for repartition sizing)
        dmTableColumnList = columnInfoMetaList.map(col=>{
            val dmCol = new DmTableColumn
            dmCol.setLength(col.length.getOrElse(""))
            dmCol.setColName(col.field)
            dmCol.setColType(col.data_type)
            dmCol
        }).asJava

        val keyMode: RedisKeyMode = redisConfig.key_mode
        val valueMode: RedisValueMode = redisConfig.value_mode

        key_delimiter = keyMode.key_delimiter.getOrElse("")
        val nameAndType: Map[String, String] = columnInfoMetaList.map(tp=>(tp.field,tp.data_type)).toMap
        try{
            // resolve each configured key field name to its (name, type) pair
            key_index=keyMode.key_index.getOrElse(List[String]()).map(fieldName=>{
                val filedType: String = nameAndType(fieldName)
                (fieldName,filedType)
            })
        }catch {
            case e:Exception =>
                throw new Exception("redisSink key_index 配置有误",e)
        }

        value_type = valueMode.`type`.get
        value_mode = valueMode.value_mode.getOrElse("rpush")
        write_mode = valueMode.write_mode.get
        value_delimiter = valueMode.value_delimiter.getOrElse("")

        // prepend the table name to every key when one is configured
        if (redisConfig.table_name!=null && redisConfig.table_name.nonEmpty)
            pre_key = s"${redisConfig.table_name}"

        // BUG FIX: the original `if (...) TTL = ...; TTL = TTL * 60 * 60` ran the
        // multiplication unconditionally (harmless only because TTL starts at 0).
        // Braces make the hours -> seconds conversion part of the guarded branch.
        if (keyMode.ttl_mode.nonEmpty && keyMode.ttl_mode.get.on_off.nonEmpty && keyMode.ttl_mode.get.on_off.getOrElse("false").toBoolean) {
            TTL = keyMode.ttl_mode.get.ttl.get.toInt * 60 * 60
        }

        initProperties(kc)
        initMeta(kc)
    }



    /**
      * Resolves the Redis connection metadata (node list, password, topology)
      * for this sink's db/table from the metadata service and stores it in
      * fields. Rejects unsupported connect types.
      */
    def initMeta(kc: KhaosContext): Unit = {
        val PROJECT_ID: Int = kc.conf.getString(SchedulerConstants.PROJECT_ID).toInt
        var metaParamsMap: Map[String, Any] = redisConfig.extender.meta.params.values
        metaParamsMap=metaParamsMap.updated("project_id",PROJECT_ID)
        import org.json4s.native.Json
        import org.json4s.DefaultFormats
        val metaJson: String = Json(DefaultFormats).write(metaParamsMap)

        // fetch connection metadata for this db/table from the meta service
        val entity: MetaDataEntity = MetaUtils.getRedisMeta(kc,
            redisConfig.db_name.getOrElse(""),
            redisConfig.table_name,
            redisConfig.extender.meta.clazz,
            metaJson,
            this)
        val redisConnect: RedisConnect = entity.getDsRedisConnect
        connectType = redisConnect.getConnectType
        // which node field applies depends on the topology; sentinel also needs the master name
        connectType match {
            case RedisConstants.RedisConnectEnum.cluster => redisNodes = redisConnect.getClusterNodes
            case RedisConstants.RedisConnectEnum.sentinel => redisNodes = redisConnect.getSentinelNodes; masterName = redisConnect.getMasterName
            case RedisConstants.RedisConnectEnum.masterSlave => redisNodes = redisConnect.getMasterNode
            case _ => throw new Exception(s"不支持的redis类型 $connectType")
        }
        if (redisConnect.getPassword != null)
            redisPassword = redisConnect.getPassword

    }

    /**
      * Loads sink tuning options (keys prefixed "module.redis.sink.") from the
      * job configuration; falls back to defaults when the config is missing or
      * a value is malformed.
      */
    def initProperties(kc: KhaosContext): Unit ={
        try {
            redisProperties = kc.conf.getAllWithPrefix("module.redis.sink.").toMap
            log.info("redisSink redisProperties")
            redisProperties.foreach {case(k,v) =>log.info(k + "   " + v)}
        } catch {
            case  e:Exception=>
                // log through the logger (with stack trace) instead of printStackTrace
                // so the failure is visible in the job log
                log.error("未读取到redis配置! 改用默认配置", e)
        }
        // a malformed batch-size value falls back to the default instead of
        // aborting the job with an uncaught NumberFormatException
        OnceMaxWriteSize =
            try {
                redisProperties.getOrElse(RedisConstants.SINK_PER_ONCEMAXWRITE_NUMS,RedisConstants.DEFAULT_SINK_PER_ONCEMAXWRITE_NUMS).toLong
            } catch {
                case _: NumberFormatException =>
                    log.error(s"非法的 ${RedisConstants.SINK_PER_ONCEMAXWRITE_NUMS} 配置, 改用默认值")
                    RedisConstants.DEFAULT_SINK_PER_ONCEMAXWRITE_NUMS.toLong
            }
        log.info(s"redis OnceMaxWriteSize : $OnceMaxWriteSize")
    }


    /**
      * Writes the serialised DataFrame to Redis: repartitions so one partition
      * fits in memory when iterated, dispatches to the writer for the
      * configured value type, then reports the processed-row metric.
      */
    def doWrite(data: DataFrame,kc:KhaosContext): Any = {
        // repartition to keep per-partition iteration from OOMing
        val partitions: Int = DataframeUtils.rePartitions(kc,data,dmTableColumnList)
        val repartitionData:DataFrame=DataframeUtils.repartionDataframe(data,partitions,this)
        val (tmpData,accumulator): (DataFrame, LongAccumulator) = DataframeUtils.calculateDataNum(kc, repartitionData, "RedisSink")
        try {
            value_type match {
                case RedisConstants.RedisKeyEnum.string => writeString(tmpData)
                case RedisConstants.RedisKeyEnum.list => writeList(tmpData)
                case RedisConstants.RedisKeyEnum.set => writeSet(tmpData)
                case RedisConstants.RedisKeyEnum.hash => writeHash(tmpData)
                //case RedisValueEnum.zset => writeZSet(repartitionData)
                // BUG FIX: the message previously interpolated write_mode; the
                // unsupported value being matched here is value_type
                case _ => throw new Exception(s"redis write fail, 不支持的value类型$value_type")
            }

        } catch {
            case e: Exception =>
                // log once and rethrow with the original as cause (no duplicate
                // printStackTrace; the cause carries the stack trace upstream)
                log.error(s"redis writer 写入失败,失败信息: ${e.getMessage}, 失败原因: ${e.getCause}")
                throw new Exception(s"redis writer 写入失败,失败信息: ${e.getMessage}, 失败原因: ${e.getCause}",e)

        }
        // report processed-row count to the ops center
        val metric: SyncProcessDataMetric = CenterMetricUtils.buildSyncProcessDataMetric(kc)
        metric.setProcessDataLValue(accumulator.value)
        CenterMetricUtils.reportSyncProcessData(metric,kc)
    }

    /**
      * Serialises every row into a single JSON string column named
      * `temp_column_name` (used in standard / uniqueValue write mode).
      */
    def convertDataFrame2Json(data: DataFrame): DataFrame = {
        // capture the data columns before choosing the value-column name
        val sourceColumns: Array[Column] = data.columns.map(col_name => col(col_name))
        // ensure the generated value column cannot clash with a data column
        while (data.columns.contains(temp_column_name)) {
            temp_column_name += Random.nextInt(10)
        }
        data.withColumn(temp_column_name, to_json(struct(sourceColumns: _*)))
    }

    /**
      * Concatenates every column with `value_delimiter` into a single string
      * column named `temp_column_name` (used in value2key write mode).
      */
    def convertDataFrame2KV(data: DataFrame): DataFrame = {
        // capture the data columns before choosing the value-column name
        val sourceColumns: Array[Column] = data.columns.map(col_name => col(col_name))
        // ensure the generated value column cannot clash with a data column
        while (data.columns.contains(temp_column_name)) {
            temp_column_name += Random.nextInt(10)
        }
        data.withColumn(temp_column_name, concat_ws(value_delimiter, sourceColumns: _*))
    }

    /**
      * Writes each row as a Redis STRING: key from [[getKey]], value from the
      * generated `temp_column_name` column. Uses SETEX when a TTL is
      * configured, plain SET otherwise. One connection per partition.
      *
      * NOTE(review): no default case — initMeta already rejects unsupported
      * connect types, so the match is effectively total here.
      */
    def writeString(data: DataFrame): Unit = {
        connectType match {
            case RedisConstants.RedisConnectEnum.cluster =>
                data.foreachPartition(iter => {
                    // JedisCluster manages its own internal pools; not closed per partition
                    val conn: JedisCluster = RedisUtils.getClusterPool(redisNodes,redisPassword,redisProperties,this)
                    iter.foreach(row => {
                        val key: String = getKey(row)
                        val value: String = row.getAs[String](temp_column_name)
                        if (TTL > 0)
                            conn.setex(key, TTL,value)
                        else
                            conn.set(key, value)
                    })
                })

            case RedisConstants.RedisConnectEnum.sentinel =>
                data.foreachPartition(iter => {
                    val conn: Jedis = RedisUtils.getSentinelPool(redisNodes,redisPassword,masterName,redisProperties,this).getResource
                    iter.foreach(row => {
                        val key: String = getKey(row)
                        val value: String = row.getAs[String](temp_column_name)
                        if (TTL > 0)
                            conn.setex(key, TTL,value)
                        else
                            conn.set(key, value)
                    })

                    // NOTE(review): leaks the connection if iteration throws — consider try/finally
                    conn.close()
                })

            case RedisConstants.RedisConnectEnum.masterSlave =>
                data.foreachPartition(iter => {
                    val conn: Jedis = RedisUtils.getSinglePool(redisNodes,redisPassword,redisProperties,this).getResource
                    iter.foreach(row => {
                        val key: String = getKey(row)
                        val value: String = row.getAs[String](temp_column_name)
                        if (TTL > 0)
                            conn.setex(key, TTL,value)
                        else
                            conn.set(key, value)
                    })

                    // NOTE(review): leaks the connection if iteration throws — consider try/finally
                    conn.close()
                })

        }

    }

    /**
      * Writes rows into Redis LISTs.
      *
      * Two layouts:
      *  - no key fields configured (`key_index` empty): all values go into one
      *    list under `pre_key`, pushed in batches of `OnceMaxWriteSize`;
      *  - key fields configured: each row is pushed individually to the list
      *    addressed by [[getKey]], honouring lpush/rpush and the optional TTL.
      *
      * BUG FIX: in masterSlave mode `conn.close()` sat inside the keyed `else`
      * branch, so the bulk branch leaked the pooled connection; sentinel mode
      * leaked on exception. Connections are now released in a finally block.
      * (JedisCluster manages its own pools and is intentionally not closed.)
      */
    def writeList(data: DataFrame): Unit = {
        connectType match {
            case RedisConstants.RedisConnectEnum.cluster =>
                data.foreachPartition(iter => {
                    val conn: JedisCluster = RedisUtils.getClusterPool(redisNodes, redisPassword, redisProperties, this)
                    val dataArray: ArrayBuffer[String] = ArrayBuffer.empty
                    if (key_index.isEmpty) {
                        iter.foreach(row => {
                            dataArray.append(row.getAs[String](temp_column_name))
                            if (dataArray.size >= OnceMaxWriteSize) {
                                bulkWriteListOfCluster(dataArray, conn)
                                dataArray.clear()
                            }
                        })
                        // flush the trailing partial batch
                        if (dataArray.nonEmpty) {
                            bulkWriteListOfCluster(dataArray, conn)
                            dataArray.clear()
                        }
                    } else {
                        iter.foreach(row => {
                            val value: String = row.getAs[String](temp_column_name)
                            val key: String = getKey(row)
                            redisConfig.value_mode.value_mode.get match {
                                case "lpush" => conn.lpush(key, value)
                                case "rpush" => conn.rpush(key, value)
                                case _ => throw new Exception(s"不支持的push方式${redisConfig.value_mode.value_mode}")
                            }
                            if (TTL > 0)
                                conn.expire(key, TTL)
                        })
                    }
                })

            case RedisConstants.RedisConnectEnum.sentinel =>
                data.foreachPartition(iter => {
                    val conn: Jedis = RedisUtils.getSentinelPool(redisNodes, redisPassword, masterName, redisProperties, this).getResource
                    try {
                        val dataArray: ArrayBuffer[String] = ArrayBuffer.empty
                        if (key_index.isEmpty) {
                            iter.foreach(row => {
                                dataArray.append(row.getAs[String](temp_column_name))
                                if (dataArray.size >= OnceMaxWriteSize) {
                                    bulkWriteListOfSingle(dataArray, conn)
                                    dataArray.clear()
                                }
                            })
                            // flush the trailing partial batch
                            if (dataArray.nonEmpty) {
                                bulkWriteListOfSingle(dataArray, conn)
                                dataArray.clear()
                            }
                        } else {
                            iter.foreach(row => {
                                val value: String = row.getAs[String](temp_column_name)
                                val key: String = getKey(row)
                                redisConfig.value_mode.value_mode.get match {
                                    case "lpush" => conn.lpush(key, value)
                                    case "rpush" => conn.rpush(key, value)
                                    case _ => throw new Exception(s"不支持的push方式${redisConfig.value_mode.value_mode}")
                                }
                                if (TTL > 0)
                                    conn.expire(key, TTL)
                            })
                        }
                    } finally {
                        conn.close()
                    }
                })

            case RedisConstants.RedisConnectEnum.masterSlave =>
                data.foreachPartition(iter => {
                    val conn: Jedis = RedisUtils.getSinglePool(redisNodes, redisPassword, redisProperties, this).getResource
                    try {
                        val dataArray: ArrayBuffer[String] = ArrayBuffer.empty
                        if (key_index.isEmpty) {
                            iter.foreach(row => {
                                dataArray.append(row.getAs[String](temp_column_name))
                                if (dataArray.size >= OnceMaxWriteSize) {
                                    bulkWriteListOfSingle(dataArray, conn)
                                    dataArray.clear()
                                }
                            })
                            // flush the trailing partial batch
                            if (dataArray.nonEmpty) {
                                bulkWriteListOfSingle(dataArray, conn)
                                dataArray.clear()
                            }
                        } else {
                            iter.foreach(row => {
                                val value: String = row.getAs[String](temp_column_name)
                                val key: String = getKey(row)
                                redisConfig.value_mode.value_mode.get match {
                                    case "lpush" => conn.lpush(key, value)
                                    case "rpush" => conn.rpush(key, value)
                                    case _ => throw new Exception(s"不支持的push方式${redisConfig.value_mode.value_mode}")
                                }
                                if (TTL > 0)
                                    conn.expire(key, TTL)
                            })
                        }
                    } finally {
                        conn.close()
                    }
                })
        }

    }

    /**
      * Writes rows into Redis SETs. Without configured key fields all values
      * are SADDed to one set under `pre_key` in batches of `OnceMaxWriteSize`;
      * with key fields each row is SADDed individually to the set addressed by
      * [[getKey]], applying the TTL per key when enabled.
      */
    def writeSet(data: DataFrame): Unit = {
        connectType match {
            case RedisConstants.RedisConnectEnum.cluster =>
                data.foreachPartition(iter => {
                    // JedisCluster manages its own pools; not closed per partition
                    val conn: JedisCluster = RedisUtils.getClusterPool(redisNodes,redisPassword,redisProperties,this)
                    val dataArray: ArrayBuffer[String] = ArrayBuffer.empty

                    if(key_index.isEmpty){
                        iter.foreach(row => {
                            val value:String = row.getAs[String](temp_column_name)
                            dataArray.append(value)
                            if (dataArray.size >= OnceMaxWriteSize) {
                                bulkWriteSetOfCluster(dataArray, conn)
                                dataArray.clear()
                            }
                        })
                        // flush the trailing partial batch
                        if(dataArray.nonEmpty){
                            bulkWriteSetOfCluster(dataArray, conn)
                            dataArray.clear()
                        }
                    } else{
                        iter.foreach(row => {
                            val value:String = row.getAs[String](temp_column_name)
                            val key: String = getKey(row)
                            conn.sadd(key, value)
                            if (TTL > 0)
                                conn.expire(key, TTL)
                        })
                    }
                })

            case RedisConstants.RedisConnectEnum.sentinel =>
                data.foreachPartition(iter => {
                    val conn: Jedis = RedisUtils.getSentinelPool(redisNodes,redisPassword,masterName,redisProperties,this).getResource
                    val dataArray: ArrayBuffer[String] = ArrayBuffer.empty
                    if(key_index.isEmpty){
                        iter.foreach(row => {
                            val value:String = row.getAs[String](temp_column_name)
                            dataArray.append(value)
                            if (dataArray.size >= OnceMaxWriteSize) {
                                bulkWriteSetOfSingle(dataArray, conn)
                                dataArray.clear()
                            }
                        })
                        // flush the trailing partial batch
                        if(dataArray.nonEmpty){
                            bulkWriteSetOfSingle(dataArray, conn)
                            dataArray.clear()
                        }
                    }else{
                        iter.foreach(row => {
                            val value:String = row.getAs[String](temp_column_name)
                            val key: String = getKey(row)
                            conn.sadd(key, value)
                            if (TTL > 0)
                                conn.expire(key, TTL)
                        }
                        )}
                    // NOTE(review): leaks the connection if iteration throws — consider try/finally
                    conn.close()
                })

            case RedisConstants.RedisConnectEnum.masterSlave =>
                data.foreachPartition(iter => {
                    val conn: Jedis = RedisUtils.getSinglePool(redisNodes,redisPassword,redisProperties,this).getResource
                    val dataArray: ArrayBuffer[String] = ArrayBuffer.empty
                    if(key_index.isEmpty){
                        iter.foreach(row => {
                            val value:String = row.getAs[String](temp_column_name)
                            dataArray.append(value)
                            if (dataArray.size >= OnceMaxWriteSize) {
                                bulkWriteSetOfSingle(dataArray, conn)
                                dataArray.clear()
                            }
                        })
                        // flush the trailing partial batch
                        if(dataArray.nonEmpty){
                            bulkWriteSetOfSingle(dataArray, conn)
                            dataArray.clear()
                        }
                    }else{
                        iter.foreach(row => {
                            val value:String = row.getAs[String](temp_column_name)
                            val key: String = getKey(row)
                            conn.sadd(key, value)
                            if (TTL > 0)
                                conn.expire(key, TTL)
                        })
                    }
                    // NOTE(review): leaks the connection if iteration throws — consider try/finally
                    conn.close()
                })

        }


    }

    /**
      * Writes rows into Redis HASHes. The hash field comes from the configured
      * `inner_key` column (see [[buildMap]]). Without key fields, entries are
      * accumulated and HMSET in batches under `pre_key`; with key fields each
      * row is HMSET under [[getKey]] with the TTL applied when enabled.
      *
      * NOTE(review): on the keyed branch `dataMap` is never cleared between
      * rows, so every HMSET rewrites all previously seen entries — confirm
      * whether this accumulation is intended.
      */
    def writeHash(data: DataFrame): Unit = {
        val inner_key: String = redisConfig.value_mode.inner_key.getOrElse("")
        // fall back to a synthetic STRING column description when inner_key is not an extract field
        val data_type: String = columnInfoMetaList.find(_.field==inner_key).getOrElse(ExtractFieldInfo(inner_key,"STRING",Option("10"),"",null)).data_type
        connectType match {
            case RedisConstants.RedisConnectEnum.cluster =>
                data.foreachPartition(iter => {
                    // JedisCluster manages its own pools; not closed per partition
                    val conn: JedisCluster = RedisUtils.getClusterPool(redisNodes,redisPassword,redisProperties,this)
                    var dataMap : util.Map[String, String]= new util.HashMap[String,String]()
                    if(key_index.isEmpty){
                        iter.foreach(row=>{
                            val value:String = row.getAs[String](temp_column_name)
                            dataMap=buildMap(dataMap,row,inner_key,data_type,value)
                            if (dataMap.size >= OnceMaxWriteSize) {
                                bulkWriteHashOfCluster(dataMap, conn)
                                dataMap.clear()
                            }
                        })
                        // flush the trailing partial batch
                        if(dataMap.nonEmpty){
                            bulkWriteHashOfCluster(dataMap, conn)
                            dataMap.clear()
                        }
                    }else{
                        iter.foreach(row => {
                            val key: String = getKey(row)
                            val value:String = row.getAs[String](temp_column_name)
                            dataMap=buildMap(dataMap,row,inner_key,data_type,value)
                            conn.hmset(key, dataMap)
                            if (TTL > 0)
                                conn.expire(key, TTL)
                        })
                    }
                })

            case RedisConstants.RedisConnectEnum.sentinel =>
                data.foreachPartition(iter => {
                    val conn: Jedis = RedisUtils.getSentinelPool(redisNodes,redisPassword,masterName,redisProperties,this).getResource
                    var dataMap : util.Map[String, String]= new util.HashMap[String,String]()
                    if(key_index.isEmpty){
                        iter.foreach(row=>{
                            val value:String = row.getAs[String](temp_column_name)
                            dataMap=buildMap(dataMap,row,inner_key,data_type,value)
                            if (dataMap.size >= OnceMaxWriteSize) {
                                bulkWriteHashOfSingle(dataMap, conn)
                                dataMap.clear()
                            }
                        })
                        // flush the trailing partial batch
                        if(dataMap.nonEmpty){
                            bulkWriteHashOfSingle(dataMap, conn)
                            dataMap.clear()
                        }
                    }else {
                        iter.foreach(row => {
                            val key: String = getKey(row)
                            val value:String = row.getAs[String](temp_column_name)
                            dataMap=buildMap(dataMap,row,inner_key,data_type,value)
                            conn.hmset(key, dataMap)
                            if (TTL > 0)
                                conn.expire(key, TTL)
                        })
                    }
                    // NOTE(review): leaks the connection if iteration throws — consider try/finally
                    conn.close()
                })

            case RedisConstants.RedisConnectEnum.masterSlave =>
                data.foreachPartition(iter => {
                    val conn: Jedis = RedisUtils.getSinglePool(redisNodes,redisPassword,redisProperties,this).getResource
                    var dataMap : util.Map[String, String]= new util.HashMap[String,String]()
                    if(key_index.isEmpty){
                        iter.foreach(row=>{
                            val value:String = row.getAs[String](temp_column_name)
                            dataMap=buildMap(dataMap,row,inner_key,data_type,value)
                            if (dataMap.size >= OnceMaxWriteSize) {
                                bulkWriteHashOfSingle(dataMap, conn)
                                dataMap.clear()
                            }
                        })
                        // flush the trailing partial batch
                        if(dataMap.nonEmpty){
                            bulkWriteHashOfSingle(dataMap, conn)
                            dataMap.clear()
                        }
                    }else {
                        iter.foreach(row => {
                            val key: String = getKey(row)
                            val value: String = row.getAs[String](temp_column_name)
                            dataMap=buildMap(dataMap,row,inner_key,data_type,value)
                            conn.hmset(key, dataMap)
                            if (TTL > 0)
                                conn.expire(key, TTL)
                        })
                    }
                    // NOTE(review): leaks the connection if iteration throws — consider try/finally
                    conn.close()
                })

        }


    }

    /** Placeholder for ZSET output — not implemented yet (all branches empty). */
    def writeZSet(data: DataFrame): Unit = {
        // TODO pending implementation
        connectType match {
            case RedisConstants.RedisConnectEnum.cluster =>
            case RedisConstants.RedisConnectEnum.sentinel =>
            case RedisConstants.RedisConnectEnum.masterSlave =>
        }

    }

    /**
      * Builds the Redis key for a row: the optional `pre_key` (table name)
      * joined with the configured key-field values via `key_delimiter`.
      * Throws when neither a prefix nor key fields produce a usable key.
      */
    def getKey(row: sql.Row): String = {
        // concatenated key-field values; empty string when no key fields are configured
        val indexStr: String =
            if (key_index.isEmpty) ""
            else key_index.map { case (fieldName, fieldType) =>
                getColumnValue(row, fieldName, fieldType)
            }.mkString(key_delimiter)

        if (pre_key.nonEmpty && key_index.isEmpty) pre_key
        else if (pre_key.nonEmpty && indexStr.nonEmpty) pre_key + "." + indexStr
        else if (pre_key.isEmpty && indexStr.nonEmpty) indexStr
        else if (pre_key.nonEmpty) pre_key + "."
        else throw new Exception(s"不合法的Key规则:null  line=${row.mkString(" ")}")
    }

    /**
      * Returns the string form of a row cell, or the literal "null" when the
      * cell is SQL NULL.
      *
      * BUG FIX: the previous per-type dispatch compared a primitive Long with
      * null in the NUMBER branch (null unboxes to 0, so the check was always
      * true and NULL silently became "0"), and the DATE/default branches cast
      * the value to the Spark type descriptors `DateType`/`NullType` rather
      * than value types. Null detection now uses the type-agnostic
      * `Row.isNullAt`, giving uniform "null" output across all column types.
      *
      * @param row         source row
      * @param column_name name of the column to read
      * @param data_type   retained for interface compatibility; no longer
      *                    needed since null handling is uniform across types
      */
    def getColumnValue(row: sql.Row, column_name: String, data_type: String): String = {
        val fieldIndex: Int = row.fieldIndex(column_name)
        if (row.isNullAt(fieldIndex)) "null"
        else String.valueOf(row.get(fieldIndex))
    }

    /**
      * Renames upstream columns to their target names, casts them to string,
      * materialises unmapped columns as NULL placeholders and applies the
      * configured per-column default values.
      *
      * @throws Exception when no column has an upstream mapping and every
      *                   default value is empty (nothing to write)
      */
    def setDefaultValue(data: DataFrame): DataFrame ={
        // keep only columns that have an upstream mapping, renamed to the target name
        val colArr = new ArrayBuffer[Column]()
        for (ef <- columnInfoMetaList) {
            if (!ef.from_field.trim.equals("")) {
                val to_field: String = ef.field
                val from_field: String = ef.from_field
                colArr += data.col(from_field) as to_field
            }
        }

        // no mapped columns at all: only valid if at least one default value is set
        if(colArr.isEmpty){
            var isError=true
            columnInfoMetaList.foreach(schema=>{
                if(!"".equals(schema.field_props.default_value)){
                    isError=false
                }
            })
            if(isError){
                throw new Exception("作业配置异常,目标表没有连接上游字段,且默认值都为空!")
            }
        }

        var value: DataFrame = data.select(colArr: _*)
        val columns: List[Column] = columnInfoMetaList.map((ef: ExtractFieldInfo) => {
            var column: Column = null
            val to_field: String = ef.field
            val data_type: String = ef.data_type
            val from_field: String = ef.from_field
            if (!from_field.trim.equals("")) { // mapped from an upstream field
                // strip whitespace from TIME-typed values
                if (data_type.equalsIgnoreCase("TIME")) {
                    column = trim(value.col(to_field).cast(StringType)) as to_field
                } else {
                    column = value.col(to_field).cast(StringType)
                }
            } else { // no upstream mapping: materialise as NULL (except serial columns)
                if (!data_type.equalsIgnoreCase("SERIAL4") && !data_type.equalsIgnoreCase("SERIAL8")) {
                    column = lit(null).cast(StringType).as(to_field)
                }
            }
            column
        })
        value = value.select(columns: _*)

        // apply configured default values onto NULL cells, column by column
        columnInfoMetaList.foreach((excol: ExtractFieldInfo) =>{
            if(excol.field_props.default_value.nonEmpty){
                val field: String = excol.field
                val default_value: String = excol.field_props.default_value
                value = value.na.fill(default_value,Array(field))
            }
        })
        value
    }

    /**
      * Replaces nulls in the DataFrame: numeric columns are filled with 0,
      * every other column is cast to string and filled with the NULL marker.
      *
      * @param data DataFrame to clean
      * @return DataFrame with all nulls replaced
      */
    def replaceNa(data: DataFrame): DataFrame = {
        // cast every non-numeric column to string so the string fill applies to it
        val stringified: DataFrame = data.schema.fields.foldLeft(data) { (df, field) =>
            field.dataType match {
                case _: NumericType => df
                case _ => df.withColumn(field.name, df.col(field.name).cast(StringType))
            }
        }
        stringified.na.fill(NA_FILL_NUMERIC).na.fill(NA_FILL_STRING)
    }

    /** Pushes the buffered values to the list under `pre_key` (cluster mode), applying the TTL when enabled. */
    def bulkWriteListOfCluster(dataArray:ArrayBuffer[String],conn:JedisCluster): Unit ={
        val writable: Boolean = key_index.isEmpty && dataArray.nonEmpty
        if (writable) {
            redisConfig.value_mode.value_mode.get match {
                case "lpush" => conn.lpush(pre_key, dataArray: _*)
                case "rpush" => conn.rpush(pre_key, dataArray: _*)
                case _ => throw new Exception(s"不支持的push方式${redisConfig.value_mode.value_mode}")
            }
            if (TTL > 0) conn.expire(pre_key, TTL)
        }
    }

    /** Pushes the buffered values to the list under `pre_key` (single-node/sentinel mode), applying the TTL when enabled. */
    def bulkWriteListOfSingle(dataArray:ArrayBuffer[String],conn:Jedis): Unit ={
        val writable: Boolean = key_index.isEmpty && dataArray.nonEmpty
        if (writable) {
            redisConfig.value_mode.value_mode.get match {
                case "lpush" => conn.lpush(pre_key, dataArray: _*)
                case "rpush" => conn.rpush(pre_key, dataArray: _*)
                case _ => throw new Exception(s"不支持的push方式${redisConfig.value_mode.value_mode}")
            }
            if (TTL > 0) conn.expire(pre_key, TTL)
        }
    }

    /** SADDs the buffered values to the set under `pre_key` (cluster mode), applying the TTL when enabled. */
    def bulkWriteSetOfCluster(dataArray:ArrayBuffer[String],conn:JedisCluster): Unit ={
        val writable: Boolean = key_index.isEmpty && dataArray.nonEmpty
        if (writable) {
            conn.sadd(pre_key, dataArray: _*)
            if (TTL > 0) conn.expire(pre_key, TTL)
        }
    }

    /** SADDs the buffered values to the set under `pre_key` (single-node/sentinel mode), applying the TTL when enabled. */
    def bulkWriteSetOfSingle(dataArray:ArrayBuffer[String],conn:Jedis): Unit ={
        val writable: Boolean = key_index.isEmpty && dataArray.nonEmpty
        if (writable) {
            conn.sadd(pre_key, dataArray: _*)
            if (TTL > 0) conn.expire(pre_key, TTL)
        }
    }

    /** HMSETs the buffered entries into the hash under `pre_key` (cluster mode), applying the TTL when enabled. */
    def bulkWriteHashOfCluster(dataMap:util.Map[String, String],conn:JedisCluster): Unit ={
        val writable: Boolean = key_index.isEmpty && !dataMap.isEmpty
        if (writable) {
            conn.hmset(pre_key, dataMap)
            if (TTL > 0) conn.expire(pre_key, TTL)
        }
    }

    /** HMSETs the buffered entries into the hash under `pre_key` (single-node/sentinel mode), applying the TTL when enabled. */
    def bulkWriteHashOfSingle(dataMap:util.Map[String, String],conn:Jedis): Unit ={
        val writable: Boolean = key_index.isEmpty && !dataMap.isEmpty
        if (writable) {
            conn.hmset(pre_key, dataMap)
            if (TTL > 0) conn.expire(pre_key, TTL)
        }
    }

    /**
      * Folds one row into the accumulating hash map: in standard/value2key mode
      * the entry (inner_key value -> serialised value) is added; in uniqueValue
      * mode the serialised JSON value itself is parsed into a map that replaces
      * the accumulator (keys and values stringified).
      */
    def buildMap(dataMap:util.Map[String, String],row:sql.Row,inner_key:String,data_type:String,value:String): util.Map[String, String] ={
        var resultMap : util.Map[String, String] = dataMap
        write_mode match {
            case RedisConstants.RedisValueEnum.standard =>
                resultMap.put(getColumnValue(row,inner_key,data_type), value)
            case RedisConstants.RedisValueEnum.value2key =>
                resultMap.put(getColumnValue(row,inner_key,data_type), value)
            case RedisConstants.RedisValueEnum.uniqueValue =>
                // fastjson may yield non-string values; the map below relies on the
                // JavaConversions implicits to round-trip through a Scala view
                resultMap = JSON.parse(value).asInstanceOf[util.Map[String, String]]
                resultMap=resultMap.map(tp=>(String.valueOf(tp._1),String.valueOf(tp._2)))
            case _ => throw new Exception(s"redis write fail, 不支持的value写入模式$write_mode")
        }
        resultMap
    }
}
