package com.fine.core.stage

import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.fine.spark.connector.base.beans.input.BaseInputConfig
import com.fine.spark.connector.base.beans.output.BaseOutputConfig
import com.fine.spark.connector.base.beans.process.BaseProcessConfig
import com.fine.spark.connector.base.enums.TagEnum
import com.fine.spark.connector.base.function.{BaseUDAF, BaseUDF}
import com.fine.spark.connector.base.utils.{Logging, ReflectUtils}
import com.fine.spark.connector.base.worker.BaseWorker
import org.apache.hadoop.hive.ql.plan.BaseWork
import org.apache.spark.sql.{SparkSession, functions}
import org.apache.spark.sql.expressions.{Aggregator, UserDefinedAggregateFunction}

import java.util.ServiceLoader
import scala.collection.convert.ImplicitConversions._

/**
 * @author jayce
 * @date 2021/11/21 11:27 AM
 * @version 1.0
 */
object BatchPip extends Logging {

  /**
   * Executes one stage of the pipeline described by `config`.
   *
   * Reads the JSON array stored under the stage tag (inputs / processes /
   * outputs), and for each entry: resolves the concrete config class via
   * [[java.util.ServiceLoader]] by matching the entry's `"type"` field,
   * deserializes the entry into that class, validates it with `check()`, and
   * dispatches it to the worker class named by the config's `workerClass`.
   *
   * @param config the full pipeline definition
   * @param tag    which stage section of the config to execute
   * @param ss     active Spark session used implicitly by the workers
   * @throws IllegalArgumentException if an entry declares a `"type"` with no
   *                                  registered config implementation, or if
   *                                  `tag` is not a supported stage
   */
  def processStage(config: JSONObject, tag: TagEnum.Value)(implicit ss: SparkSession): Unit = {
    tag match {
      case TagEnum.INPUTS =>
        val inputs: JSONArray = config.getJSONArray(TagEnum.INPUTS.toString)
        val serviceLoader: ServiceLoader[BaseInputConfig] = ServiceLoader.load(classOf[BaseInputConfig])
        inputs.foreach { inputObj =>
          val obj: JSONObject = inputObj.asInstanceOf[JSONObject]
          val inputType: String = obj.getString("type")
          // headOption instead of head: fail with a descriptive error when the
          // declared type has no registered implementation, rather than a bare
          // NoSuchElementException.
          val inputTypeClass: Class[_ <: BaseInputConfig] = serviceLoader
            .filter(input => input.getType.equals(inputType))
            .map(input => input.getClass)
            .headOption
            .getOrElse(throw new IllegalArgumentException(
              s"no input config implementation registered for type '$inputType'"))
          val input: BaseInputConfig = JSON.parseObject(obj.toJSONString, inputTypeClass)
          input.check()
          ReflectUtils.apply.getInstance[BaseWorker](input.workerClass).process(input)
        }

      case TagEnum.PROCESSES =>
        val processes: JSONArray = config.getJSONArray(TagEnum.PROCESSES.toString)
        val serviceLoader: ServiceLoader[BaseProcessConfig] = ServiceLoader.load(classOf[BaseProcessConfig])
        processes.foreach { processObj =>
          val obj: JSONObject = processObj.asInstanceOf[JSONObject]
          val processType: String = obj.getString("type")
          val processTypeClass: Class[_ <: BaseProcessConfig] = serviceLoader
            .filter(process => process.getType.equals(processType))
            .map(process => process.getClass)
            .headOption
            .getOrElse(throw new IllegalArgumentException(
              s"no process config implementation registered for type '$processType'"))
          val process: BaseProcessConfig = JSON.parseObject(obj.toJSONString, processTypeClass)
          process.check()
          ReflectUtils.apply.getInstance[BaseWorker](process.workerClass).process(process)
        }

      case TagEnum.OUTPUTS =>
        val outputs: JSONArray = config.getJSONArray(TagEnum.OUTPUTS.toString)
        val serviceLoader: ServiceLoader[BaseOutputConfig] = ServiceLoader.load(classOf[BaseOutputConfig])
        outputs.foreach { outputObj =>
          val obj: JSONObject = outputObj.asInstanceOf[JSONObject]
          val outputType: String = obj.getString("type")
          val outputTypeClass: Class[_ <: BaseOutputConfig] = serviceLoader
            .filter(output => output.getType.equals(outputType))
            .map(output => output.getClass)
            .headOption
            .getOrElse(throw new IllegalArgumentException(
              s"no output config implementation registered for type '$outputType'"))
          val output: BaseOutputConfig = JSON.parseObject(obj.toJSONString, outputTypeClass)
          output.check()
          ReflectUtils.apply.getInstance[BaseWorker](output.workerClass).process(output)
        }

      case other =>
        // Guard against a raw MatchError if TagEnum gains new values.
        throw new IllegalArgumentException(s"unsupported pipeline stage tag: $other")
    }
  }

  /**
   * Runs the whole batch pipeline: registers the user-defined aggregate
   * functions and UDFs declared in the config, then executes the input,
   * process and output stages in order.
   *
   * The optional "udaf" / "udf" config sections map a function name to the
   * fully-qualified class implementing it; each class is instantiated
   * reflectively and registered on the Spark session before any stage runs.
   *
   * @param config the full pipeline definition
   * @param ss     active Spark session functions are registered on
   */
  def startPip(config: JSONObject)(implicit ss: SparkSession): Unit = {
    // Fixed log-message typo: "pipline" -> "pipeline".
    logger.info("pipeline start...")

    // Register UDAFs declared under "udaf" (function name -> implementing class).
    Option(config.getJSONObject("udaf"))
      .filter((_: JSONObject).nonEmpty)
      .foreach { udafs =>
        udafs.foreach { case (udafName, udafCls) =>
          ReflectUtils.apply.getInstance[BaseUDAF](udafCls.toString).registerUDAF(udafName, ss)
        }
      }

    // Register UDFs declared under "udf" (function name -> implementing class).
    Option(config.getJSONObject("udf"))
      .filter((_: JSONObject).nonEmpty)
      .foreach { udfs =>
        udfs.foreach { case (udfName, udfCls) =>
          ReflectUtils.apply.getInstance[BaseUDF](udfCls.toString).registerUDF(udfName, ss)
        }
      }

    // Load input data and register it as tables, then run transformations and sinks.
    logger.info("----------------------start inputs----------------------")
    processStage(config, TagEnum.INPUTS)
    logger.info("----------------------start processes----------------------")
    processStage(config, TagEnum.PROCESSES)
    logger.info("----------------------start outputs----------------------")
    processStage(config, TagEnum.OUTPUTS)
    logger.info("pipeline finished.")
  }

}
