//import java.sql.PreparedStatement
//import java.sql.Connection
//import org.apache.spark.SparkConf
//import org.apache.spark.streaming.{Seconds, StreamingContext}
//
//import java.sql.DriverManager
//
//object SocketToMySQL {
//  def main(args: Array[String]){
//    val conf = new SparkConf()
//      .setAppName("SocketToMySQL")
//      .setMaster("local[2]")
//
//    val ssc = new StreamingContext(conf, Seconds(5))
//    val lines = ssc.socketTextStream("localhost", 9999)
//    val wordCounts = lines.flatMap(_.split(" "))
//      .filter(_.nonEmpty)
//      .map(word => (word, 1))
//      .reduceByKey(_ + _)
//    wordCounts.foreachRDD { rdd =>
//      rdd.foreachPartition { partitionOfRecords =>
//        var connection: Connection = null
//        var preparedStatement: PreparedStatement = null
//        try {
//          val url = "jdbc:mysql://localhost:3306/spark_db"
//          val username = "root"
//          val password = "yourpassword"
//          // NOTE(review): "com.mysql.jdbc.Driver" is the Connector/J 5.x class name;
//          // use "com.mysql.cj.jdbc.Driver" with Connector/J 8+. Class.forName is
//          // optional with JDBC 4+ auto-registering drivers — confirm connector version.
//          Class.forName("com.mysql.jdbc.Driver")
//          connection = DriverManager.getConnection(url, username, password)
//          val sql = """
//            INSERT INTO word_counts (word, count)
//            VALUES (?, ?)
//            ON DUPLICATE KEY UPDATE count = count + VALUES(count)
//          """
//          preparedStatement = connection.prepareStatement(sql)
//          partitionOfRecords.foreach { case (word, count) =>
//            preparedStatement.setString(1, word)
//            preparedStatement.setInt(2, count)
//            preparedStatement.executeUpdate()
//          }
//        } catch {
//          case e: Exception => e.printStackTrace()
//        } finally {
//          if (preparedStatement != null) preparedStatement.close()
//          if (connection != null) connection.close()
//        }
//      }
//    }
//    ssc.start()
//    ssc.awaitTermination()
//  }
//}