package com.kingsoft.dc.khaos.module.spark.util

import com.alibaba.fastjson.JSONObject
import com.kingsoft.dc.khaos.module.spark.constants.MetaDataConstants
import com.kingsoft.dc.khaos.module.spark.util.DataTypeConvertUtils.{getDate, getDateTime}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.json4s.jackson.JsonMethods.parse
import org.json4s.{DefaultFormats, JValue}

/**
  * Created by haorenhui on 2020/04/28.
  */
object RddConvertUtils {

    /**
      * Parses each line of `valueRDD` as a JSON object and returns its
      * key/value pairs as a map.
      *
      * NOTE: the `asInstanceOf` cast is unchecked due to type erasure, so the
      * map values may not actually be `String` at runtime (json4s yields
      * numbers, booleans, etc.); downstream converters guard against this by
      * going through `String.valueOf`.
      *
      * @param valueRDD RDD of JSON strings, one object per line
      * @return RDD of field-name -> field-value maps
      * @throws Exception if a line cannot be parsed as JSON; bad records are
      *                   deliberately NOT skipped — the job fails fast with the
      *                   offending line attached
      */
    def convertStandardRDD(valueRDD: RDD[String]): RDD[Map[String, String]] = {
        implicit val formats: DefaultFormats = DefaultFormats
        valueRDD.map { lineData =>
            try {
                // `true` enables json4s' useBigDecimalForDouble parsing mode
                parse(lineData, true: Boolean).values.asInstanceOf[Map[String, String]]
            } catch {
                case e: Exception =>
                    throw new Exception(s"RddConvertUtils 解析json数据失败, line==>[$lineData]", e)
            }
        }
    }

    /**
      * Splits each line into an array of field values.
      *
      * When the schema has a single column, or the delimiter is empty, the
      * whole line becomes a one-element array; otherwise the line is split on
      * `delimiter` (interpreted as a regex by `String.split`) with `limit = -1`
      * so trailing empty fields are preserved and the column count stays stable.
      *
      * @param valueRDD  RDD of raw text lines
      * @param delimiter field separator (regex); "" means "do not split"
      * @param schema    target schema, used only to detect single-column input
      * @return RDD of per-line field arrays
      */
    def convertValue2keyRDD(valueRDD: RDD[String], delimiter: String, schema: StructType): RDD[Array[String]] = {
        if (schema.size == 1 || delimiter == "") {
            valueRDD.map(Array(_))
        } else {
            valueRDD.map(_.split(delimiter, -1))
        }
    }

    /**
      * Converts each field map into a [[Row]] following `schema` order.
      * Fields missing from the map are filled with NULL.
      *
      * @param schema target schema; column order defines cell order in the Row
      * @param rdd    RDD of field-name -> field-value maps
      * @return RDD of typed Rows
      * @throws Exception wrapping any conversion failure together with the row data
      */
    def standardRDD2Rows(schema: StructType, rdd: RDD[Map[String, String]]): RDD[Row] = {
        rdd.map { map =>
            try {
                schema.indices.foldLeft(Row()) { (row, i) =>
                    val field = schema(i)
                    // Missing field -> NULL. String.valueOf tolerates non-String
                    // runtime values left behind by the unchecked JSON cast.
                    val realValue: String = map.get(field.name).map(v => String.valueOf(v)).orNull
                    mergeRow(row, field.dataType, realValue)
                }
            } catch {
                case e: Exception =>
                    throw new Exception(s"=>>> 数据转换失败! line=${map.mkString(" ")}", e)
            }
        }
    }

    /**
      * Explodes every (key, value) entry of each map into its own two-column
      * [[Row]]: column 0 holds the key, column 1 holds the value, each
      * converted according to the first two fields of `schema`.
      *
      * @param schema target schema; only the first two fields are used
      * @param rdd    RDD of field-name -> field-value maps
      * @return RDD with one Row per map entry
      * @throws Exception wrapping any conversion failure together with the row data
      */
    def standardRDD2RowsV2(schema: StructType, rdd: RDD[Map[String, String]]): RDD[Row] = {
        rdd.flatMap { map =>
            val keyDataType: DataType = schema(0).dataType
            val valueDataType: DataType = schema(1).dataType
            try {
                map.map { case (k, v) =>
                    // Use the entry's own key/value directly instead of
                    // re-looking the key up in the map for every entry.
                    val key: String = if (k == null) null else String.valueOf(k)
                    val value: String = String.valueOf(v)
                    mergeRow(mergeRow(Row(), keyDataType, key), valueDataType, value)
                }.toList
            } catch {
                case e: Exception =>
                    throw new Exception(s"=>>> 数据转换失败! line=${map.mkString(" ")}", e)
            }
        }
    }


    /**
      * Converts each pre-split field array into a [[Row]] following `schema`.
      * If a line produced fewer fields than the schema declares, the remaining
      * trailing columns are filled with NULL.
      *
      * @param schema    target schema; column order defines cell order in the Row
      * @param rdd       RDD of per-line field arrays
      * @param delimiter only used to re-join the fields for the error message
      * @return RDD of typed Rows
      * @throws Exception wrapping any conversion failure together with the row data
      */
    def value2KeyRDD2Rows(schema: StructType, rdd: RDD[Array[String]], delimiter: String = " "): RDD[Row] = {
        rdd.map { attributes =>
            try {
                schema.indices.foldLeft(Row()) { (row, i) =>
                    if (i >= attributes.length) {
                        // Short line: pad the missing trailing columns with NULL
                        Row.merge(row, Row(null))
                    } else {
                        mergeRow(row, schema(i).dataType, attributes(i))
                    }
                }
            } catch {
                case e: Exception =>
                    throw new Exception(s"=>>> 数据转换失败! line=${attributes.mkString(delimiter)}", e)
            }
        }
    }


    /**
      * Appends one typed cell to `lineRow`, converting `realValue` according
      * to `colType`.
      *
      * The sentinel [[MetaDataConstants.NULL]] and a `null` reference always
      * map to a NULL cell. For the non-string types a blank string is treated
      * as NULL as well, and the value is trimmed before conversion.
      * Unrecognised types (including NullType) pass the raw string through
      * unchanged.
      *
      * @param lineRow   the row built so far
      * @param colType   Spark SQL type of the column being appended
      * @param realValue raw string value (may be null)
      * @return a new Row with the converted cell appended
      */
    def mergeRow(lineRow: Row, colType: DataType, realValue: String): Row = {
        // Explicit NULL marker in the data, or an actual null reference.
        def isNullMarker(v: String): Boolean = null == v || MetaDataConstants.NULL == v
        // Non-string types additionally treat whitespace-only values as NULL.
        // (isBlank already covers null, so no separate null check is needed.)
        def isNullOrBlank(v: String): Boolean = isNullMarker(v) || StringUtils.isBlank(v)

        val cell: Any = colType match {
            case StringType    => if (isNullMarker(realValue)) null else realValue
            case LongType      => if (isNullOrBlank(realValue)) null else realValue.trim.toLong
            case DateType      => if (isNullOrBlank(realValue)) null else getDate(realValue.trim)
            case TimestampType => if (isNullOrBlank(realValue)) null else getDateTime(realValue.trim)
            case DoubleType    => if (isNullOrBlank(realValue)) null else realValue.trim.toDouble
            // NullType and any other type keep the raw value as-is
            // (the original NullType and catch-all branches were identical).
            case _             => realValue
        }
        Row.merge(lineRow, Row(cell))
    }

}
