package com.gin.nebula

import com.facebook.thrift.protocol.TCompactProtocol
import com.gin.utils.{Argument, Configs, NebulaClientUdf}
import com.vesoft.nebula.client.graph.data.ResultSet
import org.apache.commons.lang.StringUtils
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.slf4j.LoggerFactory
import java.util.Date

object NebulaClientExecutor {

  private val LOG = LoggerFactory.getLogger(this.getClass)

  /**
   * Entry point: parses CLI arguments, opens a Nebula graph session via
   * [[NebulaClientUdf]], runs one nGQL statement (the CLI statement takes
   * precedence over the one in the config file), waits the configured grace
   * period, and then releases the Nebula session and the Spark session.
   *
   * Exits with -1 when no arguments are supplied or argument parsing fails.
   */
  def main(args: Array[String]): Unit = {

    val PROGRAM_NAME = "NebulaClientExecutor"

    val sparkConf = new SparkConf
    sparkConf
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // TCompactProtocol is used by the Nebula thrift client and must be
      // registered explicitly for Kryo serialization.
      .registerKryoClasses(Array[Class[_]](classOf[TCompactProtocol]))
      .setAppName(PROGRAM_NAME)
    val sparkSession: SparkSession = SparkSession
      .builder()
      .master("local")
      .config(sparkConf)
      .getOrCreate()

    // Only print error logs (intentionally disabled).
    //sparkSession.sparkContext.setLogLevel("ERROR")

    // Custom argument handling: an empty argument list can never parse
    // successfully, so fail fast rather than invoking the parser with nothing.
    if (args.isEmpty) {
      LOG.error("args is empty {}", args)
      sys.exit(-1)
    }
    val options = Configs.parser(args, PROGRAM_NAME)
    val udfArgs: Argument = options match {
      case Some(config) => config
      case _ =>
        LOG.error("Argument parse failed")
        sys.exit(-1)
    }
    val configs = Configs.parse(udfArgs.filePath)
    val nGql = udfArgs.statement
    LOG.info(s"Configs ${configs} \n Statement ${nGql}")
    val ngConf = configs.nebulaConfig

    // Acquire the Nebula connection.
    val graphProvider = new NebulaClientUdf(ngConf.graphAddress, ngConf.timeout)
    val session = graphProvider.getGraphClient(ngConf.user, ngConf.pwd)
    try {
      graphProvider.switchSpace(session, ngConf.space)

      // Execute the query: a statement passed on the command line wins over
      // the one configured in the file; neither present is an error.
      if (StringUtils.isNotEmpty(nGql)) {
        LOG.info("nGql args is {}", nGql)
        val resultSet: ResultSet = graphProvider.submit(session, nGql)
        LOG.info("nebula query resultSet {}", resultSet)
      } else if (StringUtils.isNotEmpty(configs.executorConfig.nGql)) {
        LOG.info("nGql conf is {}", configs.executorConfig.nGql)
        val resultSet: ResultSet = graphProvider.submit(session, configs.executorConfig.nGql)
        LOG.info("nebula query resultSet {}", resultSet)
      } else {
        LOG.error("nGql is empty {}", nGql)
      }

      LOG.info("nebula query end, waiting {}", new Date())
      // Milliseconds to wait after the nGQL finishes, before Spark shuts down.
      Thread.sleep(configs.executorConfig.timeWait)
      LOG.info("nebula waiting end {}", new Date())
    } finally {
      // Release the Nebula session and connections even when the query,
      // space switch, or the wait throws — otherwise they would leak.
      graphProvider.releaseGraphClient(session)
      graphProvider.close()
      sparkSession.close()
    }
  }
}
