package com.shujia.stream

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils

object Demo8ToHDFS {

  /**
   * Consumes Kafka topic "topic4" through the receiver-based API (ZooKeeper
   * offsets) and saves each non-empty 5-second micro-batch as a text-file
   * directory named data/out-&lt;timestamp&gt;.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("stream")
      .setMaster("local[2]") // two local threads: one for the receiver, one for processing
      .set("spark.sql.shuffle.partitions", "4") // Spark SQL setting; harmless here (no SQL used)
    val sc = new SparkContext(conf)

    // Streaming context with a 5-second batch interval.
    val ssc = new StreamingContext(sc, Durations.seconds(5))

    // topic name -> number of receiver threads used to read it
    val topics = Map("topic4" -> 2)

    // Receiver-based Kafka DStream: key = Kafka message key, value = message payload.
    val kafkaDS = KafkaUtils.createStream(
      ssc,
      "node1:2181,node2:2181,node3:2181", // ZooKeeper quorum
      "123123",                           // consumer group id
      topics
    )

    // Alternative one-liner using the DStream API:
    // kafkaDS.saveAsTextFiles("data/kafkatoHdfs")

    kafkaDS.foreachRDD { rdd =>
      // Skip empty batches: otherwise a new (empty) output directory is
      // created every 5 seconds even when no data arrives from Kafka.
      if (!rdd.isEmpty()) {
        val time = System.currentTimeMillis()
        rdd.saveAsTextFile("data/out-" + time)
      }
    }

    ssc.start()
    ssc.awaitTermination() // blocks until the streaming job is stopped externally
    ssc.stop()

  }
}
