package sparksqls.datatransfer

import com.sun.tools.javac.code.TypeTag
import org.apache.log4j.Logger
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

import java.util.Properties
import scala.reflect.runtime.universe._
import scala.util.control.NonFatal

object WithSQL {
  // Single local SparkSession shared by every helper in this object.
  private val spark: SparkSession = SparkSession
    .builder.appName("connection to MySQL")
    .master("local")
    .getOrCreate()
  // Logger scoped to this object.
  private val logger = Logger.getLogger(getClass.getName)
  // NOTE(review): credentials are hard-coded for a local demo only; move them
  // to configuration / environment variables before any real deployment.
  private val user = "root"
  private val password = "19950623"

  import spark.implicits._

  /**
   * Demo entry point: reads the `student` table, appends two rows,
   * then reads the table back to confirm the insert.
   */
  def main(args: Array[String]): Unit = {
    // Use option() to supply the properties the JDBC source needs.
    val jdbcDF: DataFrame = loadFromSQL("student", user, password)
    // Show the initial contents.
    selectAll(jdbcDF, "student")
    // Each element encodes one row as "id name gender age".
    val rows: Array[String] = Array("3 Rongcheng M 26", "4 Guanhua M 27")
    val studentDF: DataFrame = createDF(rows)
    writeToSQL(studentDF, user, password)
    // Re-read to verify the two rows were appended.
    val jdbcDF2: DataFrame = loadFromSQL("student", user, password)
    selectAll(jdbcDF2, "student")
  }

  /**
   * Loads a table from the MySQL database `spark` over JDBC.
   * MySQL is deployed on localhost, version 8.0.27.
   *
   * @param table name of the table to read
   * @param user  database user name
   * @param pd    database password
   * @return the table contents as a DataFrame
   * @throws Exception if the JDBC read fails; the error is logged and then
   *                   rethrown (the previous version returned null, which
   *                   only deferred the failure to an NPE at the call site)
   */
  def loadFromSQL(table: String, user: String, pd: String): DataFrame = {
    try {
      val df = spark.read.format("jdbc")
        .option("url", "jdbc:mysql://localhost:3306/spark")
        // Connector/J 8.x (required for MySQL 8.0.27) renamed the driver:
        // com.mysql.jdbc.Driver is deprecated in favor of com.mysql.cj.jdbc.Driver.
        .option("driver", "com.mysql.cj.jdbc.Driver")
        .option("dbtable", table)
        .option("user", user)
        .option("password", pd)
        .load()
      logger.info("df successfully loaded!")
      df
    } catch {
      // NonFatal lets OutOfMemoryError, InterruptedException, etc. propagate.
      case NonFatal(ex) =>
        logger.error("error occurred!", ex)
        throw ex
    }
  }

  /**
   * Appends the given DataFrame to the `spark.student` MySQL table.
   *
   * @param df   rows to append; must match the table's column layout
   * @param user database user name
   * @param pd   database password
   */
  def writeToSQL(df: DataFrame, user: String, pd: String): Unit = {
    // Connection properties consumed by the JDBC writer.
    val prop = new Properties()
    prop.put("user", user)
    prop.put("password", pd)
    // Connector/J 8.x driver class (com.mysql.jdbc.Driver is deprecated).
    prop.put("driver", "com.mysql.cj.jdbc.Driver")
    // Append so existing rows are preserved.
    df.write.mode("append").jdbc("jdbc:mysql://localhost:3306/spark",
      "spark.student", prop)
  }

  /**
   * Registers the DataFrame as a temp view named after the table and
   * prints every row via Spark SQL.
   *
   * NOTE(review): the table name is spliced into the SQL text; callers
   * in this file pass only literals, but do not feed it untrusted input.
   *
   * @param df    DataFrame to display
   * @param table view name to register and query
   */
  def selectAll(df: DataFrame, table: String): Unit = {
    df.createOrReplaceTempView(table)
    spark.sql(s"select * from $table").show()
  }

  /**
   * Builds a student DataFrame from space-separated row strings.
   *
   * @param array rows encoded as "id name gender age"
   * @return DataFrame with schema (id: int, Name: string, Gender: string, Age: int)
   */
  def createDF(array: Array[String]): DataFrame = {
    // Split each "id name gender age" string into its four fields.
    val studentRDD = spark.sparkContext.parallelize(array).map(_.split(" "))
    // Schema matching the MySQL table layout.
    val schema: StructType = StructType(List(
      StructField("id", IntegerType, nullable = true),
      StructField("Name", StringType, nullable = true),
      StructField("Gender", StringType, nullable = true),
      StructField("Age", IntegerType, nullable = true)))
    // split() already yields String, so no toString is needed on the fields.
    val studentRow = studentRDD.map(f => Row(f(0).toInt,
      f(1).trim,
      f(2).trim,
      f(3).toInt))
    spark.createDataFrame(studentRow, schema = schema)
  }
}
