# -*- coding:utf-8 -*-
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils,TopicAndPartition
from pyspark import SparkConf, SparkContext

def show(rdd):
    """Materialize the given RDD on the driver and print all of its records.

    Used as a foreachRDD callback: pulls every element of *rdd* to the
    driver with collect() and prints them as one list. Returns None.
    """
    records = rdd.collect()
    print(records)

if __name__ == "__main__":
    # BUG FIX: the original called spark_conf.setMaster() AFTER the
    # SparkContext had already been created, so the master setting was
    # silently ignored. All config must be applied before construction.
    # "local[2]": Spark Streaming needs at least two local threads — one
    # to receive/schedule batches and at least one to process them.
    spark_conf = SparkConf().setAppName("KafkaWordCount").setMaster("local[2]")
    sc = SparkContext(conf=spark_conf)
    sc.setLogLevel("ERROR")

    # 10-second micro-batch interval.
    ssc = StreamingContext(sc, 10)

    # Resume topic "nnsmk", partition 0, from an explicit starting offset.
    topic_partition = TopicAndPartition("nnsmk", 0)
    from_offsets = {
        topic_partition: 320496,
    }

    kafka_params = {
        "bootstrap.servers": "10.0.38.14:9092",
        "group.id": "testGroup",
        # Old-consumer spelling of "earliest"; only consulted when no
        # stored/explicit offset is available for a partition.
        "auto.offset.reset": "smallest",
        "enable.auto.commit": "true",
    }

    # Direct (receiver-less) Kafka stream. The identity .map(lambda item: item)
    # from the original was a no-op and has been removed.
    lines = KafkaUtils.createDirectStream(
        ssc, ["nnsmk"], kafka_params, fromOffsets=from_offsets
    )
    lines.foreachRDD(show)

    ssc.start()
    ssc.awaitTermination()