package com.edata.bigdata.postgre

import com.edata.bigdata.annotation.Edata_Executor
import com.edata.bigdata.util.SQLExecutor
import org.apache.spark.sql.SparkSession

import java.sql.{Connection, DriverManager}

@Edata_Executor(target = "PGEXECUTOR")
class SparkPostgresExecutor extends SQLExecutor {

  // Spark session slot required by the SQLExecutor contract; populated
  // externally by the framework (left unassigned here on purpose).
  override var SESSION: SparkSession = _

  override def initialize(): Unit = {
    // Intentionally empty: this executor needs no setup beyond the
    // inherited JDBC_* configuration. NOTE(review): presumably the
    // framework calls this before createPostgreConnection — confirm.
  }

  /**
   * Opens a new JDBC connection to the configured PostgreSQL database.
   *
   * Credentials are passed as separate arguments to
   * `DriverManager.getConnection(url, user, password)` instead of being
   * spliced into the URL query string. Embedding them in the URL (the
   * previous approach) breaks whenever the password contains reserved
   * URI characters such as '&', '%', '=', or spaces, and also leaks the
   * password into any log line that records the URL.
   *
   * The JDBC_* fields are inherited from SQLExecutor (assumed set before
   * this is called — TODO confirm initialization order).
   *
   * @return a freshly opened [[java.sql.Connection]]; the caller owns it
   *         and is responsible for closing it.
   */
  def createPostgreConnection(): Connection = {
    val url = s"$JDBC_PREFIX$JDBC_IP:$JDBC_PORT/$JDBC_DATABASE"
    DriverManager.getConnection(url, JDBC_USER, JDBC_PASSWORD)
  }

}

