from pyspark import SparkContext, SparkConf
from pyspark.sql import SparkSession, DataFrame


# Kafka -> console Structured Streaming job.
#
# BUG FIX: the original loaded spark-streaming-kafka-0-8-assembly_2.11-2.4.0.jar,
# which is the legacy DStream connector. Structured Streaming's
# `readStream.format("kafka")` requires the spark-sql-kafka-0-10 connector;
# with the 0-8 jar the job dies at load() with
# "Failed to find data source: kafka". Using `spark.jars.packages` lets Spark
# resolve the correct connector (matching Scala 2.11 / Spark 2.4.0) from Maven
# and drops the hard-coded user-specific jar path.
conf = SparkConf()
conf.set(
    "spark.jars.packages",
    "org.apache.spark:spark-sql-kafka-0-10_2.11:2.4.0",
)

# Reuse a running context if one exists; jar/package config must be set
# before the context is (first) created for it to take effect.
sc = SparkContext.getOrCreate(conf=conf)
sc.setLogLevel("ERROR")  # silence the default INFO log spam

# Wrap the existing context in a SparkSession for the DataFrame API.
ss = SparkSession(sparkContext=sc)

# Subscribe to the Kafka topic and keep only the message payload.
# Kafka delivers key/value as binary, so CAST to STRING before selecting.
df = (
    ss.readStream
    .format("kafka")
    .option("kafka.bootstrap.servers", "10.0.0.252:9092")
    .option("subscribe", "django-logger")
    .load()
    .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
    .select("value")
)

# Print each micro-batch to stdout and block until the query is stopped.
df.writeStream.format("console").start().awaitTermination()
