

package cn.spark.study.sql

import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import java.sql.DriverManager

/**
 * Example of using Spark SQL's JDBC data source: reads two MySQL tables,
 * joins them to find students with score >= 80, and writes the result
 * back to MySQL via plain JDBC.
 *
 * NOTE(review): the JDBC URL carries no credentials and `getConnection`
 * is called with empty user/password — confirm this matches the target
 * MySQL setup before deploying.
 */
object JDBCDataSource {

  // Single source of truth for the JDBC endpoint (was duplicated three times).
  private val JdbcUrl = "jdbc:mysql://spark1:3306/testdb"

  def main(args: Array[String]): Unit = {
    val sc = new ContextUtil().getContext("JDBCDataSource")
    val sqlC = new SQLContext(sc)
    jdbcDataSource(sc, sqlC)
  }

  /**
   * Registers `student_infos` and `student_scores` as temp tables, selects
   * students scoring at least 80, and inserts each into `good_student_info`.
   *
   * @param sc   active SparkContext (unused directly, kept for API compatibility)
   * @param sqlC SQLContext used to read the JDBC tables and run the join
   */
  def jdbcDataSource(sc: SparkContext, sqlC: SQLContext): Unit = {
    // The load() results are only needed for temp-table registration,
    // so the previous unused `val` bindings are dropped.
    sqlC.read.format("jdbc")
      .options(Map("url" -> JdbcUrl, "dbtable" -> "student_infos"))
      .load()
      .registerTempTable("student_infos")

    sqlC.read.format("jdbc")
      .options(Map("url" -> JdbcUrl, "dbtable" -> "student_scores"))
      .load()
      .registerTempTable("student_scores")

    val goodStudents = sqlC.sql(
      "select si.name,si.age,ss.score from student_infos si " +
        "inner join student_scores ss on si.name = ss.name where ss.score >= 80")

    // One connection per PARTITION instead of per row (the original opened a
    // fresh connection for every record), and a PreparedStatement with bound
    // parameters instead of string-concatenated SQL (injection-prone and
    // broken for names containing quotes). try/finally guarantees the
    // statement and connection are closed even when an insert fails; the
    // original closed the connection before the statement and leaked both
    // on any exception.
    goodStudents.rdd.foreachPartition { rows =>
      Class.forName("com.mysql.jdbc.Driver")
      val conn = DriverManager.getConnection(JdbcUrl, "", "")
      try {
        val stmt = conn.prepareStatement(
          "insert into good_student_info values(?, ?, ?)")
        try {
          rows.foreach { row =>
            stmt.setString(1, row.getAs[String]("name"))
            stmt.setInt(2, row.getAs[Int]("age"))
            stmt.setInt(3, row.getAs[Int]("score"))
            stmt.executeUpdate()
          }
        } finally {
          stmt.close()
        }
      } finally {
        conn.close()
      }
    }
  }
}