package com.taoqi.streaming

import java.util.concurrent.Executors
import java.{util, lang => jl}

import com.google.common.collect.{Lists => gl}
import org.apache.commons.lang.StringUtils
import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming._
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.language.postfixOps

import scala.concurrent.ExecutionContext.Implicits.global

/**
  * Created by TQ on 2017/10/27.
  */
object MapStateTest {

  /**
    * Entry point: consumes GPS messages from the "gps-data" Kafka topic with
    * the Spark Streaming kafka010 direct stream and prints each (key, value)
    * pair per 2-second batch.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setAppName("DirectKafkaWordCount")
    val ssc = new StreamingContext(sparkConf, Seconds(2))

    val topicsSet = "gps-data".split(",").toSet

    // The kafka010 integration uses the new Kafka consumer API, which
    // requires "bootstrap.servers" (the legacy "metadata.broker.list" key is
    // ignored), explicit key/value deserializers, and a consumer group id.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "10.0.25.57:9092,10.0.25.48:9092,10.0.25.39:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "map-state-test",
      // NOTE(review): offsets are not committed automatically here; adjust if
      // at-least-once delivery across restarts is required.
      "enable.auto.commit" -> (false: jl.Boolean)
    )

    val messages = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topicsSet, kafkaParams))

    // Spark Streaming refuses to start without at least one output operation;
    // print() registers one and shows a sample of each batch.
    messages.map(record => (record.key, record.value)).print()

    // Without start()/awaitTermination() the context is never launched and
    // main() returns immediately.
    ssc.start()
    ssc.awaitTermination()
  }
}
