package com.edata.bigdata.nebula

import com.edata.bigdata.annotation.Edata_Executor
import com.edata.bigdata.nebulasource.{NebulaConnConfig, NebulaReadConfig, NebulaReader, NebulaWriteConfig, NebulaWriter}
import com.edata.bigdata.util.{EDataUtils, Executor}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

@Edata_Executor(target = "NEBULAEXECUTOR")
class SparkNebulaExecutor extends Executor {

  override var SESSION: SparkSession = _

  /** Variable name used to reference the matched vertex/edge in generated nGQL. */
  private val RDR = "v"

  // Shared Nebula connection tuning. Timeouts are presumably milliseconds —
  // NOTE(review): the previous code used 30 in some read paths, which looked
  // like a typo for 3000; normalized here.
  private val ConnRetry = 2
  private val ReadConnTimeoutMs = 3000
  private val WriteConnTimeoutMs = 6000

  override def initialize(): Unit = {
    // No executor-specific initialization required for the Nebula executor.
  }

  /** Builds the shared Nebula graph connection configuration with the given timeout. */
  private def buildConnConfig(timeoutMs: Int): NebulaConnConfig =
    NebulaConnConfig.builder()
      .withGraphAddr(NEBULA_CONN_GRAPH_ADDRESS)
      .withUser(NEBULA_CONN_GRAPH_USER)
      .withPassword(NEBULA_CONN_GRAPH_PASSWORD)
      .withConnRetry(ConnRetry)
      .withConnTimeout(timeoutMs)
      .build()

  /** nGQL MATCH pattern: a vertex tag pattern or an undirected edge pattern. */
  private def matchPattern(label: String, labelType: String): String =
    if (labelType.equals("VERTEX")) s"(${RDR}:${label})" else s"()-[${RDR}:${label}]-()"

  /**
   * Fully-qualified property reference for RETURN/WHERE clauses.
   * Vertex properties need the tag prefix (v.tag.prop); edge properties do not.
   */
  private def propertyRef(label: String, labelType: String, prop: String): String =
    if (labelType.equals("VERTEX")) s"${RDR}.${label}.${prop}" else s"${RDR}.${prop}"

  /** Loads raw text lines from `path` and parses them into a DataFrame using `seperator`. */
  override def findDataFromPath[T](path: String, seperator: String)(implicit bean: Manifest[T]): DataFrame = {
    val rdd = SESSION.sparkContext.textFile(path)
    LOGGER.info(s"finding data from ${path}")
    createDataFrame(rdd, seperator)
  }

  /** Writes the RDD's lines as text files under `path`. */
  override def saveDataToPath[T](data: RDD[String], path: String)(implicit bean: Manifest[T]): Unit = {
    LOGGER.info(s"saving data to ${path}")
    data.saveAsTextFile(path)
  }

  /**
   * Finds all vertices/edges of the bean's label, optionally filtered.
   * args are optional WHERE-clause condition fragments, combined with "and".
   */
  override def findData[T](args: String*)(implicit bean: Manifest[T]): DataFrame = {
    val (space, label, label_type, gid, id, col_types) = find_GRAPH_SCHEMA(bean.toString())
    val cols = col_types.map(c_t => c_t.split(":")(0))
    val ret_col = cols.map(col => propertyRef(label, label_type, col))

    // BUGFIX: conditions were previously joined with "," which is not valid
    // inside an nGQL WHERE clause; join with " and " like the sibling finders.
    val CONDITION = if (args.isEmpty) "" else s"where ${args.mkString(" and ")}"
    val NGQL = s"USE ${space};MATCH ${matchPattern(label, label_type)} ${CONDITION} RETURN ${ret_col.mkString(",")}"

    val readConfig = NebulaReadConfig.builder()
      .withSpace(space)
      .withLabel(label)
      .withNoCol(false)
      .withRetCol(cols)
      .withLimit(100)
      .withPartitionNum(10)
      .withNgql(NGQL)
      .build()

    val reader = SESSION.read.nebula(buildConnConfig(ReadConnTimeoutMs), readConfig)
    if (label_type.equals("VERTEX")) reader.loadVerticesByNgql() else reader.loadEdgeByNgql()
  }

  /**
   * Parses the RDD into a DataFrame and inserts it as vertices or edges.
   * args(1) is the field separator used by createDataFrame;
   * NOTE(review): args(0) is unused here — confirm the intended contract.
   */
  override def saveData[T](data: RDD[String], args: String*)(implicit bean: Manifest[T]): Unit = {
    // BUGFIX: was declared `var` but never reassigned.
    val (space, label, label_type, gid, id, col_types) = find_GRAPH_SCHEMA(bean.toString())
    val df = createDataFrame(data, args(1))
    val writeConfig = NebulaWriteConfig.builder()
      .withSpace(space)
      .withLabel(label)
      .withLabelType(label_type)
      .withWriteMode("insert")
      .withGIDField(gid)
      .withSchema(col_types.mkString(","))
      .withGIDAsProp(true)
      .build()
    val writer = df.write.nebula(buildConnConfig(WriteConnTimeoutMs), writeConfig)
    if (label_type.equals("VERTEX")) writer.writeVertices() else writer.writeEdges()
  }

  override def updateData[T](data: RDD[String], args: String*)(implicit bean: Manifest[T]): Unit = ??? // TODO: not implemented

  /**
   * Finds the vertex/edge whose ID property equals `value`, plus optional
   * extra WHERE condition fragments in `args`.
   */
  override def findDataById[T](value: String, args: String*)(implicit bean: Manifest[T]): DataFrame = {
    val (space, label, label_type, gid, id, col_types) = find_GRAPH_SCHEMA(bean.toString())
    val cols = col_types.map(c_t => c_t.split(":")(0))
    val ret_col = cols.map(col => propertyRef(label, label_type, col))

    // `id` is declared as "name:type" in the schema.
    val idParts = id.split(":")
    val idk = propertyRef(label, label_type, idParts(0))
    val idv = EDataUtils.convertVToQueryStrByDataType(value, idParts(1))
    // BUGFIX: extra conditions were joined with "and" (no surrounding spaces),
    // producing invalid nGQL such as "aandb"; join with " and ".
    val CONDITION =
      if (args.isEmpty) s"where ${idk}==${idv}"
      else s"where ${idk}==${idv} and ${args.mkString(" and ")}"

    val NGQL = s"USE ${space};MATCH ${matchPattern(label, label_type)} ${CONDITION} RETURN ${ret_col.mkString(",")}"

    val readConfig = NebulaReadConfig.builder()
      .withSpace(space)
      .withLabel(label)
      .withNoCol(false)
      .withRetCol(cols)
      .withLimit(100)
      .withPartitionNum(10)
      .withNgql(NGQL)
      .build()

    val reader = SESSION.read.nebula(buildConnConfig(ReadConnTimeoutMs), readConfig)
    // BUGFIX: the edge branch previously called loadEdges(), silently ignoring
    // the NGQL built above; use loadEdgeByNgql() like findData/findDataByProperty.
    if (label_type.equals("VERTEX")) reader.loadVerticesByNgql() else reader.loadEdgeByNgql()
  }

  /**
   * Finds vertices/edges whose property `key` equals `value`, plus optional
   * extra WHERE condition fragments in `args`.
   */
  override def findDataByProperty[T](key: String, value: String, args: String*)(implicit bean: Manifest[T]): DataFrame = {
    val (space, label, label_type, gid, id, col_types) = find_GRAPH_SCHEMA(bean.toString())
    val cols = col_types.map(c_t => c_t.split(":")(0))
    val ret_col = cols.map(col => propertyRef(label, label_type, col))

    val propKey = propertyRef(label, label_type, key)
    // BUGFIX: the value was previously converted using the ID column's data
    // type even though the condition is on `key`; look up `key`'s declared
    // type in the schema (falling back to the ID's type, preserving the old
    // behavior when the key is not declared).
    val keyType = col_types
      .find(c_t => c_t.split(":")(0) == key)
      .map(c_t => c_t.split(":")(1))
      .getOrElse(id.split(":")(1))
    val propVal = EDataUtils.convertVToQueryStrByDataType(value, keyType)
    // BUGFIX: " and " instead of "and" (see findDataById).
    val CONDITION =
      if (args.isEmpty) s"where ${propKey}==${propVal}"
      else s"where ${propKey}==${propVal} and ${args.mkString(" and ")}"

    val NGQL = s"USE ${space};MATCH ${matchPattern(label, label_type)} ${CONDITION} RETURN ${ret_col.mkString(",")}"

    val readConfig = NebulaReadConfig.builder()
      .withSpace(space)
      .withLabel(label)
      .withNoCol(false)
      .withRetCol(cols)
      .withLimit(100)
      .withPartitionNum(10)
      .withNgql(NGQL)
      .build()

    val reader = SESSION.read.nebula(buildConnConfig(ReadConnTimeoutMs), readConfig)
    if (label_type.equals("VERTEX")) reader.loadVerticesByNgql() else reader.loadEdgeByNgql()
  }

  /** Scans all vertices of the bean's tag (no nGQL; storage-side scan). */
  def findVertex[T]()(implicit bean: Manifest[T]): DataFrame = {
    val (space, label, _, gid, id, col_types) = find_GRAPH_SCHEMA(bean.toString())
    val cols = col_types.map(c_t => c_t.split(":")(0))

    val readConfig = NebulaReadConfig.builder()
      .withSpace(space)
      .withLabel(label)
      .withNoCol(false)
      .withRetCol(cols)
      .withLimit(100)
      .withPartitionNum(10)
      .build()
    SESSION.read.nebula(buildConnConfig(ReadConnTimeoutMs), readConfig).loadVertices()
  }

  /** Scans all edges of the bean's edge type (noCol=true: properties are not fetched). */
  def findEdge[T]()(implicit bean: Manifest[T]): DataFrame = {
    val (space, label, _, gid, id, col_types) = find_GRAPH_SCHEMA(bean.toString())
    val cols = col_types.map(c_t => c_t.split(":")(0))

    val readConfig = NebulaReadConfig.builder()
      .withSpace(space)
      .withLabel(label)
      .withNoCol(true)
      .withRetCol(cols)
      .withLimit(100)
      .withPartitionNum(10)
      .build()
    SESSION.read.nebula(buildConnConfig(ReadConnTimeoutMs), readConfig).loadEdges()
  }

}
