package bigdata.hourglass.engine

import java.io.{ByteArrayOutputStream, PrintStream}
import java.lang.reflect.Modifier
import java.util.{Map => JMap}

import akka.actor.{Actor, Props}
import bigdata.hourglass.common.Logging
import bigdata.hourglass.common.domain.Bean._
import bigdata.hourglass.common.utils.{ObjGenerator, ZkUtils}
import bigdata.hourglass.engine.core.SparkRuntime
import bigdata.hourglass.engine.main.HourglassEngineApp.schedulerMode
import bigdata.hourglass.engine.repl.Interpreter._
import bigdata.hourglass.engine.repl.SparkInterpreter
import bigdata.hourglass.engine.utils.BatchSQLRunnerEngine
import bigdata.hourglass.utils.MailUtils
import com.alibaba.fastjson.JSON
import org.I0Itec.zkclient.ZkClient
import org.apache.http.client.fluent.Request
import org.apache.http.entity.ContentType
import org.apache.spark.LSQLConf._
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.streaming.StreamingQueryListener

/**
 * @author leo.jie (weixiao.me@aliyun.com)
 * @organization DataReal
 * @version 1.0
 * @website https://www.jlpyyf.com
 * @date 2019/12/8 9:18 PM
 * @since 1.0
 */
/**
 * Worker actor that executes L-SQL jobs against a dedicated SparkSession.
 *
 * Lifecycle: on `preStart` it connects to ZooKeeper, builds a new SparkSession
 * from the shared [[SparkRuntime]], registers static UDFs reflectively and then
 * publishes itself in ZK as an available execution slot. While a job is running
 * the ZK node is removed (see [[actorWrapper]]) so the scheduler will not route
 * new work here; it is re-created when the job finishes.
 *
 * @param _params          engine parameters (also exposed as `params`)
 * @param _interpreter     Spark REPL interpreter used for CODE-mode statements
 * @param hourglassSession shared session state (batch/stream job registries)
 * @param conf             SparkConf this actor reads its feature flags from
 */
class ExeActor(_params: JMap[Any, Any], _interpreter: SparkInterpreter, hourglassSession: HourglassSession, conf: SparkConf)
  extends Actor with Logging {

  // Mutable actor state. sparkSession/interpreter/zkClient are assigned once in
  // preStart; hourglassExecution/token are overwritten for every LSql message,
  // which is safe only because an actor processes one message at a time.
  var sparkSession: SparkSession = _
  var interpreter: SparkInterpreter = _
  var zkClient: ZkClient = _
  var hourglassExecution: HourglassExecution = _
  var token: String = _
  val params: JMap[Any, Any] = _params

  // ZK path under which this actor advertises availability; unique per actor
  // because it embeds the actor's own path name.
  private val zkValidActorPath = ZkUtils.validEnginePath + "/" + hourglassSession.engineInfo +
    "_" + context.self.path.name

  // Feature flags and limits, snapshotted once at construction time.
  // NOTE(review): initHiveCatalog, autoComplete and authEnable are read here but
  // never used in this file — presumably consumed elsewhere, or dead config.
  private val zkAddress = params.getOrDefault(L_SQL_ZK.key, L_SQL_ZK.defaultValueString).toString
  private val initHiveCatalog = conf.getBoolean(INIT_HIVE_CATALOG.key, INIT_HIVE_CATALOG.defaultValue.get)
  private val autoComplete = conf.getBoolean(HIVE_CATALOG_AUTO_COMPLETE.key, HIVE_CATALOG_AUTO_COMPLETE.defaultValue.get)
  private val authEnable = conf.getBoolean(HOURGLASS_AUTH_ENABLE.key, HOURGLASS_AUTH_ENABLE.defaultValue.get)
  private val mailEnable = conf.getBoolean(MAIL_ENABLE.key, MAIL_ENABLE.defaultValue.get)
  private val streamJobMaxAttempts = conf.getInt(STREAM_JOB_MAX_ATTEMPTS.key, STREAM_JOB_MAX_ATTEMPTS.defaultValue.get)


  /**
   * Actor startup: wire ZK, interpreter and SparkSession, then register this
   * actor in ZK as available. Ordering matters — the ZK registration must come
   * last so the actor is only advertised once it is fully initialized.
   */
  override def preStart(): Unit = {
    info("Actor Starting ......")
    zkClient = ZkUtils.getZkClient(zkAddress)
    interpreter = _interpreter
    sparkSession = SparkRuntime.getOrCreate(params).createRuntime(conf).newSession()
    if (mailEnable) addListener()
    registerUDF("bigdata.hourglass.engine.utils.SparkUdf")
    // 6000 / -1 are presumably the ZK session timeout and a "no expiry" flag —
    // TODO confirm against ZkUtils.registerActorInEngine.
    ZkUtils.registerActorInEngine(zkClient, zkValidActorPath, hourglassSession.tag, 6000, -1)
  }


  /**
   * Actor shutdown: close the interpreter and stop the SparkSession.
   * NOTE(review): the ZK node created in preStart is not explicitly removed
   * here — presumably it is ephemeral and expires with the session; verify.
   */
  override def postStop(): Unit = {
    info("Actor stopping ......")
    interpreter.close()
    sparkSession.stop()
  }


  /**
   * Message protocol:
   *  - LSql: substitute `${name}` variables, assign a job group, reply with the
   *    job's HourglassExecution handle, then (for CODE mode) run the statement
   *    through the interpreter.
   *  - GetBatchResult: return-and-remove a finished batch result, or an empty
   *    HourglassExecution when the id is unknown.
   */
  override def receive: Receive = {
    case LSql(mode, lSql, variables, _token) =>
      actorWrapper(lSql, variables) { () => {
        var rLSql = lSql
        token = _token
        if (variables != null) {
          // `variables` is a JSON array of {"name":..., "value":...} objects;
          // each occurrence of ${name} in the SQL text is replaced by its value.
          val variablesIterators = JSON.parseArray(variables).iterator()
          while (variablesIterators.hasNext) {
            val nObject = JSON.parseObject(variablesIterators.next().toString)
            rLSql = rLSql.replace("${" + nObject.getString("name") + "}", nObject.getString("value"))
          }
        }
        // Toggle the scheduler pool so consecutive jobs alternate between the
        // two fair pools. NOTE(review): schedulerMode is a shared global from
        // HourglassEngineApp — toggling it from multiple actors is racy.
        schedulerMode = !schedulerMode
        sparkSession.sparkContext.setLocalProperty("spark.scheduler.pool",
          if (schedulerMode) "pool_fair_1" else "pool_fair_2")
        // NOTE(review): built from the pre-substitution lSql, while the job
        // description below uses the substituted rLSql — confirm intentional.
        hourglassExecution = HourglassExecution(lSql = lSql, variables = variables)
        // Assign a groupId to the current IQL job so it can be tracked/cancelled.
        val groupId = BatchSQLRunnerEngine.getGroupId
        hourglassExecution.engineInfoAndGroupId = hourglassSession.engineInfo + "_" + groupId
        sparkSession.sparkContext.clearJobGroup()
        sparkSession.sparkContext.setJobDescription("rLSql:" + rLSql)
        sparkSession.sparkContext.setJobGroup("rLSqlId:" + groupId, "rLSql:" + rLSql)
        // Reply immediately with the job's unique handle; the caller polls for
        // the result later via GetBatchResult.
        sender() ! hourglassExecution
        mode match {
          case SQLMode.CODE =>
            warn("\n" + ("*" * 80) + "\n" + rLSql + "\n" + ("*" * 80))
            hourglassExecution.mode = SQLMode.CODE
            // Normalize quoting/newlines before handing the text to the REPL.
            rLSql = rLSql.replaceAll("'", "\"").replaceAll("\n", " ")
            val response = interpreter.execute(rLSql)
            // NOTE(review): `response` is evaluated once; if execute() ever
            // returns ExecuteIncomplete this loop spins forever on the same
            // value. Presumably execute() blocks until completion — confirm.
            while (!executeIsFinish(response)) {}
          case _ =>
        }
      }
      }
    case GetBatchResult(engineInfoAndGroupId) =>
      // Results are one-shot: returned once, then removed from the registry.
      if (hourglassSession.batchJob.keySet().contains(engineInfoAndGroupId)) {
        sender() ! hourglassSession.batchJob.get(engineInfoAndGroupId)
        hourglassSession.batchJob.remove(engineInfoAndGroupId)
      } else {
        sender() ! HourglassExecution()
      }
    case _ => None
  }

  /**
   * Intended to alert (mail / DingTalk) on streaming-query termination and to
   * restart failed streams up to `streamJobMaxAttempts` times.
   *
   * NOTE(review): `handleFunc` is defined but never passed to
   * `sparkSession.streams.addListener` (or anything else), so as written this
   * method has no runtime effect — the listener wiring appears to be missing.
   * Also `props` is null (the real mail properties are commented out below),
   * so MailUtils.sendMail would receive null properties if this ever ran.
   */
  def addListener(): Unit = {
    //    val props = ObjGenerator.newProperties(Seq(("mail.smtp.auth", PropsUtils.get("mail.smtp.auth")),
    //      ("mail.smtp.host", PropsUtils.get("mail.smtp.host")), ("mail.smtp.port", PropsUtils.get("mail.smtp.port")),
    //      ("mail.user", PropsUtils.get("mail.user")), ("mail.password", PropsUtils.get("mail.password"))): _*)
    val props = null
    val handleFunc = (start: StreamingQueryListener.QueryStartedEvent,
                      end: StreamingQueryListener.QueryTerminatedEvent) => {
      val streamName = start.name
      // Compute the "retry N of M" message when the stream still has restart
      // attempts left; None once the budget is exhausted or was never set.
      val otherMsg =
        if (hourglassSession.streamJobMaxAttempts.containsKey(streamName)
          && hourglassSession.streamJobMaxAttempts.get(streamName) > 0) {
          val restTimes = streamJobMaxAttempts - hourglassSession.streamJobMaxAttempts.get(streamName) + 1
          if (restTimes <= streamJobMaxAttempts) Some(s"""正在尝试第${restTimes}次重启""")
          else None
        } else None
      val receiver = hourglassSession.streamJobWithMailReceiver.get(streamName)
      try {
        // Mail alert (best-effort: failures are logged, never rethrown).
        if (null != receiver) {
          MailUtils.sendMail(props, Array(receiver, "IQL任务告警",
            s"实时任务：$streamName(${start.id}) 执行失败...\n${otherMsg.getOrElse("")}\n ${end.exception.getOrElse("")}"))
        }
      } catch {
        case e: Exception => error("发送邮件失败...\n" + e)
      }
      // DingTalk alert (best-effort as well).
      // NOTE(review): the access_token in the URL is the literal placeholder
      // string "access_token" — this request cannot succeed as written.
      if (hourglassSession.streamJobWithDingDingReceiver.contains(streamName)) {
        try {
          Request.Post(s"https://oapi.dingtalk.com/robot/send?access_token=access_token")
            .bodyString(
              s"""
                 |{
                 |     "msgtype": "markdown",
                 |     "markdown": {"title":"IQL任务告警",
                 |     "text":"### IQL任务告警  \n > 实时任务：$streamName（${start.id}）执行失败...\n
                 |     ${otherMsg.getOrElse("")}\n ${end.exception.getOrElse("")}"
                 |     }
                 | }
                        """.stripMargin, ContentType.APPLICATION_JSON)
            .execute().returnContent().asString()
        } catch {
          case e: Exception => error("发送钉钉失败...\n" + e)
        }
      }
      // Restart the stream if attempts remain (decrementing the budget);
      // otherwise drop all bookkeeping for this stream name.
      if (hourglassSession.streamJobMaxAttempts.containsKey(streamName) &&
        hourglassSession.streamJobMaxAttempts.get(streamName) > 0) {
        val newQuery = hourglassSession.streamJobWithDataFrame.get(streamName).start()
        hourglassSession.streamJob.put(hourglassSession.engineInfo + "_" + newQuery.name + "_" + newQuery.id, newQuery)
        hourglassSession.streamJobMaxAttempts.put(streamName, hourglassSession.streamJobMaxAttempts.get(streamName) - 1)
      } else {
        hourglassSession.streamJobWithDataFrame.remove(streamName)
        hourglassSession.streamJobMaxAttempts.remove(streamName)
        hourglassSession.streamJobWithMailReceiver.remove(streamName)
        hourglassSession.streamJobWithDingDingReceiver -= streamName
      }
    }
  }

  /**
   * Wraps a job execution: before running, delete this actor's ZK node (mark
   * unavailable); after running, re-register it (mark available again).
   * Failures inside `f` are converted into an ERROR_DATA HourglassExecution
   * and sent back to the caller instead of crashing the actor.
   *
   * NOTE(review): only Exception is caught — an Error thrown by f would skip
   * the trailing re-registration and leave this actor unadvertised. Also
   * `new String(out.toByteArray)` uses the platform default charset.
   */
  def actorWrapper(lSql: String, variables: String)(f: () => Unit) {
    ZkUtils.deletePath(zkClient, zkValidActorPath)
    try {
      f()
    } catch {
      case e: Exception =>
        val out = new ByteArrayOutputStream()
        e.printStackTrace(new PrintStream(out))
        hourglassExecution = HourglassExecution(lSql = lSql, variables = variables)
        hourglassExecution.data = new String(out.toByteArray)
        hourglassExecution.dataType = ResultDataType.ERROR_DATA
        sender() ! hourglassExecution
    }
    ZkUtils.registerActorInEngine(zkClient, zkValidActorPath, hourglassSession.tag, 6000, -1)
  }

  /**
   * Folds an interpreter response into the current `hourglassExecution` and
   * publishes it into the session's batchJob registry.
   *
   * @return false only for ExecuteIncomplete (caller keeps waiting);
   *         true for success, error, abort, or any other terminal response.
   */
  def executeIsFinish(response: ExecuteResponse): Boolean = {
    response match {
      case _: ExecuteIncomplete => false
      case e: ExecuteSuccess =>
        // Elapsed seconds since the execution handle was created.
        val take = (System.currentTimeMillis() - hourglassExecution.startTime.getTime) / 1000
        hourglassExecution.takeTime = take
        hourglassExecution.data = e.content.values.values.mkString("\n")
        hourglassExecution.dataType = ResultDataType.PRE_DATA
        hourglassExecution.status = JobStatus.FINISH
        hourglassSession.batchJob.put(hourglassExecution.engineInfoAndGroupId, hourglassExecution)
        true
      case e: ExecuteError =>
        hourglassExecution.status = JobStatus.FINISH
        hourglassExecution.data = e.eValue
        hourglassExecution.success = false
        hourglassExecution.dataType = ResultDataType.ERROR_DATA
        hourglassSession.batchJob.put(hourglassExecution.engineInfoAndGroupId, hourglassExecution)
        true
      case e: ExecuteAborted =>
        hourglassExecution.status = JobStatus.FINISH
        hourglassExecution.data = e.message
        hourglassExecution.success = false
        hourglassExecution.dataType = ResultDataType.ERROR_DATA
        hourglassSession.batchJob.put(hourglassExecution.engineInfoAndGroupId, hourglassExecution)
        true
      case _ => true
    }
  }

  /**
   * Reflectively invokes every public static method of `clazz`, passing the
   * SparkSession — by convention each such method registers one or more UDFs.
   * Individual failures are printed and skipped so one bad UDF does not block
   * the rest.
   *
   * @param clazz fully-qualified name of the UDF holder class
   */
  def registerUDF(clazz: String): Unit = {
    Class.forName(clazz).getMethods.foreach { f =>
      try {
        if (Modifier.isStatic(f.getModifiers)) {
          f.invoke(null, sparkSession)
        }
      } catch {
        case e: Exception => e.printStackTrace()
      }
    }
  }
}

object ExeActor {

  /**
   * Factory for the [[Props]] used to spawn an [[ExeActor]].
   *
   * Keeping Props creation in the companion object (rather than at call sites)
   * is the Akka-recommended pattern: it avoids accidentally closing over the
   * enclosing actor's state in the by-name constructor expression.
   *
   * @param params           engine parameters forwarded to the actor
   * @param interpreter      Spark REPL interpreter for CODE-mode statements
   * @param hourglassSession shared session state
   * @param sparkConf        Spark configuration for the actor's session
   */
  def props(params: JMap[Any, Any], interpreter: SparkInterpreter, hourglassSession: HourglassSession,
            sparkConf: SparkConf): Props = {
    Props(new ExeActor(params, interpreter, hourglassSession, sparkConf))
  }
}