package com.kingsoft.dc.khaos.module.spark.util

import java.io.InputStream
import java.util

import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.module.spark.constants.TechCheckConst.FileType
import com.kingsoft.dc.khaos.module.spark.metadata.source.{ExtractInfo, HdfsCheckConfig}
import com.kingsoft.dc.khaos.module.spark.model.TechCheck.{KsyunErrorCode, KsyunTechCheckException, NewTechCheckInfo}
import org.apache.commons.lang.StringUtils
import org.apache.hadoop.fs.FileSystem
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.dom4j.Element
import org.dom4j.io.SAXReader
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.collection.JavaConverters

/**
  * Created by czc on 2020/12/21.
  */
object TechCheckUtils {

  /**
    * Derives the path of the companion check file (ddl / xml) from a dat-file path.
    *
    * Example: datPath = /adsfadsf/basdfads/data/20201219/sdadsfasdfasd.dat with
    * fileType.dir = "ddl" and fileType.typ = "ddl" yields
    * /adsfadsf/basdfads/ddl/20201219/sdadsfasdfasd.ddl
    *
    * @param datPath  absolute dat-file path, e.g. /adsfadsf/basdfads/data/20201219/sdadsfasdfasd.dat
    * @param fileType target file type supplying the sibling directory name (`dir`) and extension (`typ`)
    * @return absolute path of the companion file of the requested type
    */
  def getCheckFileByDat(datPath: String, fileType: FileType): String = {
    // Index of the "/" that starts the date segment, e.g. the "/" before "20201219".
    val indlast = datPath.substring(0, datPath.lastIndexOf("/")).lastIndexOf("/")
    // e.g. /adsfadsf/basdfads/data
    val parentPath = datPath.substring(0, indlast)
    // e.g. /adsfadsf/basdfads
    val rootPath = parentPath.substring(0, parentPath.lastIndexOf("/"))
    // NOTE(review): `length - 4` assumes a 3-character extension such as ".dat" — confirm for other inputs.
    rootPath + "/" + fileType.dir + datPath.substring(indlast, datPath.length - 4) + "." + fileType.typ
  }

  /**
    * Resolves the full HDFS path of the check file of the given type.
    * DDL and XML files fall back to the dat directory when their own directory is not configured.
    *
    * @param hdfs            HDFS base URI
    * @param projectMd5      project identifier used as the user directory name
    * @param hdfsCheckConfig configuration holding the input directories and base file name
    * @param fileType        which check file (DATA / DDL / XML) to resolve
    * @return full HDFS path of the requested check file
    */
  def getTechCheckFilePath(hdfs: String, projectMd5: String, hdfsCheckConfig: HdfsCheckConfig, fileType: FileType): String = {
    val dir = fileType match {
      case FileType.DATA => hdfsCheckConfig.input_dat_path
      case FileType.DDL =>
        if (StringUtils.isEmpty(hdfsCheckConfig.input_ddl_path)) hdfsCheckConfig.input_dat_path
        else hdfsCheckConfig.input_ddl_path
      case FileType.XML =>
        if (StringUtils.isEmpty(hdfsCheckConfig.input_xml_path)) hdfsCheckConfig.input_dat_path
        else hdfsCheckConfig.input_xml_path
    }
    getHdfsPath(hdfs, projectMd5, dir, hdfsCheckConfig.input_dat_name + "." + fileType.typ)
  }

  /**
    * Builds `hdfs + "/user/" + projectMd5 + "/" + dir + "/" + fileName`,
    * stripping a single leading and trailing "/" from `dir` first.
    */
  def getHdfsPath(hdfs: String, projectMd5: String, dir: String, fileName: String): String = {
    val subdir = dir.stripPrefix("/").stripSuffix("/")
    hdfs + "/user/" + projectMd5 + "/" + subdir + "/" + fileName
  }

  /**
    * Populates the given [[NewTechCheckInfo]] with the fields of the &lt;file&gt; element of a check XML.
    *
    * @param techCheckInfo      target object to populate (also returned)
    * @param tInputStringStream stream over the XML document
    * @return the populated `techCheckInfo`
    * @throws KsyunTechCheckException with [[KsyunErrorCode.XML_FORMAT]] when an expected
    *                                 element is missing or a numeric field is malformed
    */
  def loadXML(techCheckInfo: NewTechCheckInfo, tInputStringStream: InputStream): NewTechCheckInfo = {
    // NOTE(review): SAXReader is not hardened against XXE here — confirm the input is trusted.
    val reader = new SAXReader
    val root = reader.read(tInputStringStream).getRootElement
    try {
      val file = root.element("file")
      techCheckInfo.setDatatype(file.element("datatype").getTextTrim)
      techCheckInfo.setFilename(file.element("filename").getTextTrim)
      techCheckInfo.setRecordnum(java.lang.Long.parseLong(file.element("recordnum").getTextTrim))
      techCheckInfo.setFilesize(java.lang.Long.parseLong(file.element("filesize").getTextTrim))
      techCheckInfo.setStartTimestamp(file.element("starttimestamp").getTextTrim)
      techCheckInfo.setEndTimestamp(file.element("endtimestamp").getTextTrim)
      techCheckInfo
    } catch {
      case e: Exception =>
        throw new KsyunTechCheckException(KsyunErrorCode.XML_FORMAT, e)
    }
  }

  /**
    * Parses a DDL document into a [[NewTechCheckInfo]]: table metadata, the field list
    * (alternating &lt;fieldname&gt;/&lt;fieldtype&gt; pairs under &lt;fielddescription&gt;)
    * and the primary keys (&lt;keyname&gt; elements under &lt;keydescription&gt;).
    *
    * @param tInputStringStream stream over the DDL XML document
    * @return the parsed check info
    * @throws RuntimeException when the field or key sections are malformed
    */
  def loadDDL(tInputStringStream: InputStream): NewTechCheckInfo = {
    val reader = new SAXReader
    val root = reader.read(tInputStringStream).getRootElement
    val file = root.element("file")
    val techCheckInfo = new NewTechCheckInfo
    techCheckInfo.setTablename(file.element("filename").getTextTrim)
    techCheckInfo.setFieldcount(file.element("fieldcount").getTextTrim.toInt)
    techCheckInfo.setFileversion(file.element("fileversion").getTextTrim)
    techCheckInfo.setIsfixedlength(file.element("isfixedlength").getTextTrim.toInt)
    // Field definitions: must be an even-length list of name/type element pairs.
    val fieldElts = file.element("fielddescription").elements
    if (fieldElts.size % 2 != 0)
      throw new RuntimeException("DDL解析出错，请检查fielddescription内的字段格式是否正确！")
    var i = 0
    while (i < fieldElts.size) {
      val nameElt = fieldElts.get(i).asInstanceOf[Element]
      val typeElt = fieldElts.get(i + 1).asInstanceOf[Element]
      if (nameElt.getName == "fieldname" && typeElt.getName == "fieldtype")
        techCheckInfo.addField(nameElt.getTextTrim, typeElt.getTextTrim)
      else
        throw new RuntimeException("DDL解析出错，请检查fielddescription内的字段格式是否正确！")
      i += 2
    }
    // Primary-key names.
    val keyElts = file.element("keydescription").elements
    for (j <- 0 until keyElts.size) {
      val keyElt = keyElts.get(j).asInstanceOf[Element]
      if (keyElt.getName == "keyname")
        techCheckInfo.addKey(keyElt.getTextTrim)
      else
        throw new RuntimeException("DDL解析出错，请检查keydescription内的字段格式是否正确！")
    }
    techCheckInfo
  }

  /**
    * Saves a DataFrame as a plain-text HDFS file at an absolute path, joining all columns
    * with the given separator.
    *
    * @param dataFrame   DataFrame to save
    * @param absfilePath absolute output path
    * @param splitRex    column separator used in the output lines
    * @param saveMode    save mode: Append, Overwrite, ErrorIfExists or Ignore
    */
  def saveAsFileAbsPath(dataFrame: DataFrame, absfilePath: String, splitRex: String, saveMode: SaveMode): Unit = {
    // Disable output compression so the result files are directly readable.
    dataFrame.sqlContext.sparkContext.hadoopConfiguration.set("mapred.output.compress", "false")
    // NOTE(review): column names are spliced into the SQL expression unquoted; names with
    // special characters (spaces, dots) would break this — confirm upstream naming rules.
    val allColumnNames = dataFrame.columns.mkString(",")
    val result: DataFrame = dataFrame.selectExpr(s"concat_ws('$splitRex',$allColumnNames) as allclumn")
    result.write.mode(saveMode).text(absfilePath)
  }

  /**
    * Saves the check-log lines (when present) followed by the reject records to `absfilePath`.
    * When `logList` is empty only the reject DataFrame is written, using `saveMode` directly.
    *
    * NOTE(review): the `charset` parameter is currently unused — confirm whether an
    * encoding should be applied on write.
    */
  def saveLogs(rjDF: DataFrame, logList: java.util.List[String], absfilePath: String, splitRex: String, sparkSession: SparkSession, saveMode: SaveMode, charset: String) = {
    if (logList.size() > 0) {
      // Turn the Java log list into a single-column DataFrame and write it first.
      import sparkSession.implicits._
      val logDF = sparkSession.sparkContext.parallelize(
        JavaConverters.asScalaIteratorConverter(logList.iterator).asScala.toSeq).toDF("log")
      saveAsFileAbsPath(logDF, absfilePath, splitRex, saveMode)
      // Append the reject records after the log lines.
      saveAsFileAbsPath(rjDF, absfilePath, splitRex, SaveMode.Append)
    } else {
      saveAsFileAbsPath(rjDF, absfilePath, splitRex, saveMode)
    }
  }

  /**
    * Merges the strategy configs of steps "1" and "3" into one [[HdfsCheckConfig]].
    * The two rendered JSON objects are concatenated by splicing "{a}" and "{b}" into "{a,b}".
    *
    * NOTE(review): the splice assumes both configs render as non-empty JSON objects;
    * an empty "{}" on either side would produce invalid JSON — confirm this invariant.
    */
  def getHdfsCheckConfigByKC(kc: KhaosContext): HdfsCheckConfig = {
    implicit val formats: DefaultFormats.type = DefaultFormats
    val sourceInfo1 = parse(kc.conf.getString("1"), true).extract[ExtractInfo]
    val sourceInfo3 = parse(kc.conf.getString("3"), true).extract[ExtractInfo]
    val config1 = compact(render(sourceInfo1.strategy.config))
    val config3 = compact(render(sourceInfo3.strategy.config))
    // Drop the closing brace of config1 and the opening brace of config3: {a} + {b} => {a,b}.
    val merged = config1.substring(0, config1.length - 1) + "," + config3.substring(1)
    parse(merged, true).extract[HdfsCheckConfig]
  }

}