package process
import java.io.{BufferedReader, InputStreamReader}
import java.sql.{Connection, DriverManager, ResultSet}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

import scala.collection.mutable.ArrayBuffer
import scala.util.Using
import scala.util.control.NonFatal

import process.JobHelper.hdfs



object Hdfs:
  /** Base URI of the HDFS namenode plus the data root directory. */
  val hdfsUri = "hdfs://192.168.10.102:9000/data/"

  val conf = new Configuration()

  /** Reads the single-reducer output file `part-r-00000` under the HDFS
    * directory `fileName`, parses each tab-separated line into a
    * (key, value) string pair, and loads the pairs into the MySQL table
    * `tableName` via [[JDBC.insertTable]].
    *
    * Lines without a tab separator are skipped instead of crashing the job.
    *
    * @param fileName  job output directory under `hdfsUri`
    * @param tableName target MySQL table (must have columns k, v)
    */
  def trans2mysql(fileName: String, tableName: String): Unit =
    val fs = new Path(hdfsUri).getFileSystem(conf)
    try
      // Using.resource closes the reader even when parsing or insertion fails.
      val data = Using.resource(
        new BufferedReader(
          new InputStreamReader(fs.open(new Path(hdfsUri + fileName + "/part-r-00000")))
        )
      ) { reader =>
        Iterator
          .continually(reader.readLine())
          .takeWhile(_ != null)
          .map(_.split("\t"))
          .collect { case Array(k, v, _*) => (k, v) } // skip malformed lines
          .to(ArrayBuffer)
      }
      JDBC.insertTable(data, tableName)
    finally
      fs.close() // release the FileSystem handle even if read/insert failed

      

object JDBC:
  val url = "jdbc:mysql://192.168.10.101:3306/boss"
  val username = "hive"
  val pwd = "123456"
  var connection: Connection = null

  /** Inserts every (k, v) pair of `data` into `tableName` using a single
    * batched prepared statement (one network round-trip instead of one per
    * row). The connection and statement are always closed, even on failure.
    *
    * NOTE(review): `tableName` is interpolated into the SQL text because a
    * JDBC placeholder cannot bind an identifier — callers must pass trusted
    * table names only.
    *
    * Errors are logged and swallowed, preserving the original best-effort
    * behavior of this loader.
    *
    * @param data      (key, value) pairs; the value must parse as an Int
    * @param tableName target table with columns (k, v)
    */
  def insertTable(data: ArrayBuffer[(String, String)], tableName: String): Unit =
    try
      connection = DriverManager.getConnection(url, username, pwd)
      try
        val insertSQL = s"INSERT INTO  $tableName (k, v) VALUES (?, ?)"
        val preparedStatement = connection.prepareStatement(insertSQL)
        try
          data.foreach { case (k, v) =>
            preparedStatement.setString(1, k)
            preparedStatement.setInt(2, v.toInt) // second column is numeric
            preparedStatement.addBatch()
          }
          preparedStatement.executeBatch() // single round-trip for all rows
        finally preparedStatement.close()
      finally
        connection.close() // was leaked in the original implementation
        connection = null
    catch
      // NonFatal: let OutOfMemoryError, InterruptedException, etc. propagate.
      case NonFatal(e) => e.printStackTrace()
    


  
