package com.fanli.bigdata.mytest

import kafka.serializer.StringDecoder
import org.apache.spark._
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka.KafkaUtils
import java.sql.{Connection, DriverManager, ResultSet}

object MysqlDemo {
  // NOTE(review): the SparkContext is created eagerly at object initialization,
  // so any JVM that touches this object starts a local Spark context. Credentials
  // and endpoints are hard-coded throughout; move them to configuration.
  val conf = new SparkConf().setAppName("MySpakDemo1").setMaster("local[*]")
  val sc = new SparkContext(conf)

  /** Entry point: streams word counts from Kafka into MySQL (blocks forever). */
  def main(args: Array[String]): Unit = {
    kafkaToMysql()
  }

  /**
   * Prints up to 5 (tkey, tvalue) rows from `test20161026` to stdout.
   *
   * @param tkey currently unused — the filtered variant of the query is kept
   *             below as a comment; if re-enabled it must use a
   *             PreparedStatement placeholder, not string concatenation
   *             (SQL injection).
   */
  def mysqlQuery(tkey: String): Unit = {
    val connStr = "jdbc:mysql://115.159.45.213:3306/test?user=hive&password=hive"
    val conn = DriverManager.getConnection(connStr)
    try {
      val statement = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)
      try {
        // Filtered variant (rewrite with "WHERE tkey = ?" + setString if needed):
        // "SELECT tkey,tvalue FROM test20161026 WHERE tkey = ? LIMIT 5"
        val sql = "SELECT tkey,tvalue FROM test20161026 LIMIT 5"
        val rs = statement.executeQuery(sql)
        while (rs.next()) {
          println(rs.getString("tkey") + "," + rs.getString("tvalue"))
        }
      } finally {
        statement.close() // also closes the open ResultSet
      }
    } finally {
      conn.close() // was leaked on exception in the original
    }
  }

  /** Word-counts a local text file and batch-inserts the counts into MySQL. */
  def rddToMysql(): Unit = {
    // Fixed malformed URI: "file://D:/log.txt" parses "D:" as the URI host;
    // a local absolute Windows path needs three slashes.
    val file = sc.textFile("file:///D:/log.txt")
    val word = file.flatMap(line => line.split(" "))
    // Removed dead code: a discarded `word.filter(_=="hello").collect` and an
    // unused `rdd.collect()` that pulled the whole result to the driver.
    val rdd = word.map(word => (word, 1)).reduceByKey(_ + _)

    // The writer stays an inline lambda on purpose: referencing an
    // object-level helper here would capture this non-serializable singleton
    // (it holds the SparkContext) and fail task serialization.
    rdd.foreachPartition { it =>
      val url = "jdbc:mysql://115.159.45.213:3306/test?useUnicode=true&characterEncoding=utf8"
      val conn = DriverManager.getConnection(url, "hive", "hive")
      try {
        val pstat = conn.prepareStatement("INSERT INTO `test20161026` (`tkey`, `tvalue`,ins_time) VALUES (?, ?, now())")
        try {
          // The loop is inside try/finally now: a failing setString/addBatch
          // previously leaked both the statement and the connection.
          for ((key, count) <- it) {
            pstat.setString(1, key)
            pstat.setInt(2, count)
            pstat.addBatch()
          }
          pstat.executeBatch()
        } finally {
          pstat.close()
        }
      } finally {
        conn.close()
      }
    }
  }

  /**
   * Streams messages from Kafka topic "test02", word-counts each 10-second
   * batch, and batch-inserts the counts into MySQL. Blocks until the
   * streaming context terminates.
   */
  def kafkaToMysql(): Unit = {
    val ssc = new StreamingContext(sc, Seconds(10))
    val kafkaParams = Map[String, String]("metadata.broker.list" -> "115.159.45.213:9092")
    val topics = "test02"
    val topicsSet = topics.split(",").toSet
    val messages = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topicsSet)

    val lines = messages.map(_._2) // drop the Kafka message key
    val words = lines.flatMap(_.split(" "))
    val wordCounts = words.map(x => (x, 1L)).reduceByKey(_ + _)

    // Inline writer for the same serialization reason as in rddToMysql.
    wordCounts.foreachRDD { rdd =>
      rdd.foreachPartition { it =>
        val url = "jdbc:mysql://115.159.45.213:3306/test?useUnicode=true&characterEncoding=utf8"
        val conn = DriverManager.getConnection(url, "hive", "hive")
        try {
          val pstat = conn.prepareStatement("INSERT INTO `test20161026` (`tkey`, `tvalue`,ins_time) VALUES (?, ?, now())")
          try {
            // Loop moved inside try/finally: the original leaked pstat/conn
            // when a bind or addBatch call threw.
            for ((key, count) <- it) {
              pstat.setString(1, key)
              pstat.setLong(2, count)
              pstat.addBatch()
            }
            pstat.executeBatch()
          } finally {
            pstat.close()
          }
        } finally {
          conn.close()
        }
      }
    }
    ssc.start()
    ssc.awaitTermination()
  }
}
