from pyspark.sql import SparkSession

# Smoke-test script: connect to the standalone Spark cluster, build a tiny
# in-memory DataFrame, and print its row count (expected output: 2).
spark = SparkSession \
    .builder \
    .master('spark://spark-master:7077') \
    .config('spark.executor.memory', '512m') \
    .config('spark.executor.instances', '2') \
    .config('spark.cores.max', '2') \
    .appName("TestApp") \
    .getOrCreate()

try:
    # Rows are (id, label) tuples; keep the per-column types consistent so
    # Spark can infer the schema (here: bigint, string).
    df = spark.createDataFrame(
        [
            (1, "foo"),
            (2, "bar"),
        ],
        ["id", "label"],  # column names matching the tuple positions
    )

    # count() is an action: it triggers job execution on the cluster.
    print(df.count())
finally:
    # Release the cluster resources (executors, cores) held by this app.
    # Without this, the 2 × 512m executors stay allocated until the driver
    # process exits.
    spark.stop()