from pyspark.sql import SparkSession

# Connect to the standalone Spark cluster at spark-master:7077, keeping the
# job small: at most 2 executor instances, 2 total cores, 512 MB per executor.
spark = (
    SparkSession.builder
    .master('spark://spark-master:7077')
    .config('spark.executor.memory', '512m')
    .config('spark.executor.instances', '2')
    .config('spark.cores.max', '2')
    .appName("TestApp")
    .getOrCreate()
)

# Read the CSV with its first row as column names. NOTE: without
# inferSchema=True every column is read as a string, which is fine here
# since we only count rows.
df = spark.read.csv('/opt/spark-data/test.csv', header=True)

print(df.count())

# Release cluster resources (executors, app registration) instead of
# holding them until interpreter exit.
spark.stop()
