import os

from pyspark.sql import SparkSession

# Create (or reuse) a SparkSession attached to the standalone cluster,
# pulling in the AWS/Hadoop jars required for s3a:// access to MinIO.
_spark_settings = {
    'spark.jars.packages': (
        'com.amazonaws:aws-java-sdk-bundle:1.11.469,'
        'org.apache.hadoop:hadoop-aws:3.3.1,'
        'org.apache.hadoop:hadoop-common:3.3.1'
    ),
    'spark.executor.memory': '512m',
    'spark.executor.instances': '2',
    'spark.cores.max': '2',
}

builder = SparkSession.builder \
    .master('spark://spark-master:7077') \
    .appName("Jup")
for _key, _value in _spark_settings.items():
    builder = builder.config(_key, _value)
spark = builder.getOrCreate()

# S3/MinIO connection settings. Values are taken from the environment when
# available so secrets are not forced into source control.
# NOTE(review): the hard-coded fallbacks below were committed previously —
# rotate these keys and drop the defaults once the environment is configured.
s3accessKeyAws = os.environ.get("S3_ACCESS_KEY", "kGzOWOmt5T8PpVPSffdy")
s3secretKeyAws = os.environ.get("S3_SECRET_KEY", "k8QQDCRknm6C7ypigO9QySO1YQyRgkLHUwRLxIG1")
s3endPointLoc = os.environ.get("S3_ENDPOINT", "http://minio:9000")

sc = spark.sparkContext

# Wire the S3A filesystem to the MinIO endpoint. All settings are collected
# in one mapping and applied in a single pass; the original code set
# fs.s3a.connection.ssl.enabled twice and re-fetched hadoopConfiguration()
# on every line.
_s3a_settings = {
    "fs.s3a.access.key": s3accessKeyAws,
    "fs.s3a.secret.key": s3secretKeyAws,
    "fs.s3a.endpoint": s3endPointLoc,
    # MinIO serves buckets as path components, not virtual hosts.
    "fs.s3a.path.style.access": "true",
    # The endpoint above is plain HTTP.
    "fs.s3a.connection.ssl.enabled": "false",
    "fs.s3a.impl": "org.apache.hadoop.fs.s3a.S3AFileSystem",
    # Connection-establish timeout in milliseconds.
    "fs.s3a.connection.establish.timeout": "15000",
    # Static key/secret auth (no instance profiles / session tokens).
    "fs.s3a.aws.credentials.provider": "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider",
}

_hadoop_conf = sc._jsc.hadoopConfiguration()
for _key, _value in _s3a_settings.items():
    _hadoop_conf.set(_key, _value)

# Load the test CSV from the MinIO bucket, treating the first row as column
# headers, then report the inferred schema and the row count.
df = spark.read.option("header", True).csv('s3a://spark-raw/test.csv')

df.printSchema()
print(df.count())
