package main.scala.demo

import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.log4j.{Level, Logger}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * MyNetworkWordCount
  * A Spark Streaming word-count program: reads lines from a TCP socket in
  * 3-second micro-batches, counts words per batch, writes the counts to
  * MySQL, and prints them to stdout.
  *
  * @author zhangyimin
  *         2018-10-12 3:35 PM
  * @version 1.0
  */
object MyNetworkWordCount {

  /** Default TCP source used when no command-line arguments are given. */
  private val DefaultHost = "10.16.7.36"
  private val DefaultPort = 5678

  /**
    * Entry point. Opens a socket text stream, splits each line into
    * whitespace-separated words, counts the words within every 3-second
    * batch, persists the per-batch counts to MySQL, and prints them.
    *
    * The source endpoint was previously hard-coded; it is now taken from
    * optional command-line arguments, defaulting to the original values so
    * existing zero-argument invocations behave identically.
    *
    * @param args optional: args(0) = source host, args(1) = source port
    */
  def main(args: Array[String]): Unit = {
    // Silence noisy Spark / Jetty logging so the per-batch output is readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // Backward-compatible generalization of the formerly hard-coded endpoint.
    val host = if (args.length > 0) args(0) else DefaultHost
    val port = if (args.length > 1) args(1).toInt else DefaultPort

    // local[2]: a socket receiver occupies one core permanently, so at least
    // two cores are needed — one to receive, one to process the batches.
    val sparkConf = new SparkConf().setAppName("MyNetworkWordCount").setMaster("local[2]")
    val sparkContext = new SparkContext(sparkConf)

    // One micro-batch every 3 seconds.
    val streamingContext = new StreamingContext(sparkContext, Seconds(3))
    // NOTE: a checkpoint directory is only required for stateful operations
    // such as updateStateByKey, e.g.:
    //   streamingContext.checkpoint("hdfs://10.16.7.36:9000/DStream")

    // Input DStream over the TCP socket; store received blocks serialized
    // in memory only (no disk spill, no replication).
    val lines = streamingContext.socketTextStream(host, port, StorageLevel.MEMORY_ONLY_SER)

    // Split each received line into words.
    val words = lines.flatMap(_.split(" "))

    // Per-batch word count; no state is carried across batches.
    val result = words.map(word => (word, 1)).reduceByKey(_ + _)

    // Persist to MySQL. foreachPartition lets the sink open one connection
    // per partition rather than one per record.
    result.foreachRDD { rdd =>
      rdd.foreachPartition(partition => ActiveWebRDD2Mysql.save2MySQL(partition))
    }

    // Print the first elements of each batch RDD to stdout.
    result.print()

    // Start the streaming computation and block until it is terminated.
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
