package cn.doitedu.day01

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import java.sql.DriverManager


//使用MapPartitions方法查询MySQL
/**
 * Enriches order records with their category name by querying MySQL.
 *
 * Uses `mapPartitions` so that one JDBC connection and ONE reusable
 * PreparedStatement are created per partition (on the Executor), instead of
 * per record. Input lines are CSV: `orderId,categoryId,money`; the output is
 * `(orderId, categoryId, money, categoryName)` tuples saved as text.
 */
object T09_QueryCategoryNameV2 {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("QueryCategoryName")
      .setMaster("local[5]") // NOTE: comment out setMaster when submitting to a cluster

    val sc = new SparkContext(conf)

    val lines: RDD[String] = sc.textFile("data/order.txt")

    val resRDD: RDD[(String, Long, Double, String)] = lines.mapPartitions(it => {
      if (!it.hasNext) {
        // Empty partition: opening a connection here would leak it, because
        // the per-record close below only runs when the map body executes.
        Iterator.empty
      } else {
        // Create the connection and a single reusable PreparedStatement once
        // per partition (this runs on the Executor, not the Driver).
        Class.forName("com.mysql.cj.jdbc.Driver")
        val connection = DriverManager.getConnection("jdbc:mysql://node-1.51doit.cn:3306/doit?characterEncoding=utf-8", "root", "123456")
        val preparedStatement = connection.prepareStatement("select name from tb_category where id = ?")
        it.map(line => {
          try {
            val fields = line.split(",")
            val oid = fields(0)
            val cid = fields(1).toLong
            val money = fields(2).toDouble
            // Reuse the statement; only the bound parameter changes per record.
            preparedStatement.setLong(1, cid)
            val resultSet = preparedStatement.executeQuery()
            // null when the category id has no matching row (original behavior).
            val name: String = if (resultSet.next()) resultSet.getString(1) else null
            resultSet.close()
            if (!it.hasNext) {
              // Last record of the partition: release the JDBC resources.
              preparedStatement.close()
              connection.close()
            }
            (oid, cid, money, name)
          } catch {
            case scala.util.control.NonFatal(e) =>
              // A bad record or SQL failure mid-partition must not leak the
              // connection; clean up, then let Spark see the failure.
              preparedStatement.close()
              connection.close()
              throw e
          }
        })
      }
    })

    resRDD.saveAsTextFile("out/out8")
  }

}
