from pyspark import SparkContext, SparkConf
from pyspark.streaming import StreamingContext

from pyspark.streaming.kafka import KafkaUtils

if __name__ == '__main__':
    # Local 4-core Spark context driving a 10-second micro-batch stream.
    conf = SparkConf().setMaster('local[4]').setAppName('pyspark_kafka')
    sc = SparkContext(conf=conf)
    ssc = StreamingContext(sc, 10)
    # Canonical upper-case level name ("Error" relied on Spark's internal
    # upper-casing; "ERROR" is the documented form).
    ssc.sparkContext.setLogLevel("ERROR")

    # Receiver-based Kafka DStream: (context, ZooKeeper quorum,
    # consumer group id, {topic: partitions-per-receiver}).
    kafka_streaming_rdd = KafkaUtils.createStream(
        ssc, "niit01:2181", "3", {"test": 1})

    # Events accumulated on the driver across all micro-batches.
    result = []

    def get_output(_, rdd):
        """foreachRDD callback: pull one batch to the driver and buffer it.

        The first positional argument is the batch time, unused here.
        Fix: the original guarded the append with ``len(result) > 0``,
        which is never true for the initially-empty list, so no event
        was ever collected; append unconditionally instead.
        """
        for event in rdd.collect():
            result.append(event)

    kafka_streaming_rdd.foreachRDD(get_output)
    ssc.start()
    # Block for up to 10000 seconds (returns earlier if the context stops).
    ssc.awaitTerminationOrTimeout(10000)
    # Print after the streaming run — the original printed before start()
    # and therefore always showed an empty list.
    print(result)