from pyspark import SparkContext, RDD, Row, SparkConf
from pyspark.sql import SparkSession

# Path to the MySQL JDBC driver jar, needed on both the driver and executor
# classpaths and shipped to the cluster via spark.jars.
MYSQL_JAR = "/Users/sonto/Workspace/Rimi/P1902/spark_example/jars/mysql-connector-java-8.0.13.jar"

conf = SparkConf()\
    .set("spark.executor.extraClassPath", MYSQL_JAR)\
    .set("spark.driver.extraClassPath", MYSQL_JAR)\
    .set("spark.jars", MYSQL_JAR)\
    .setMaster("spark://10.0.0.252:7077")\
    .setAppName("Spark SQL")

# Use the public builder API (the supported entry point since Spark 2.0)
# instead of constructing SparkSession directly from a SparkContext.
ss = SparkSession.builder.config(conf=conf).getOrCreate()
sc = ss.sparkContext
sc.setLogLevel("INFO")

# DATA FRAME FROM DATABASE
# Load the UserProfile table from MySQL into a Spark DataFrame.
# SECURITY NOTE(review): credentials are hard-coded in source — move the
# user/password into a config file or environment variables before sharing.
df = ss.read.jdbc("jdbc:mysql://129.28.68.96:3306/want_db",
                  table="UserProfile",
                  properties={"user": "root", "password": "Root123!"})
# df.collect() already returns Row objects on the driver; going through
# df.rdd first adds a pointless DataFrame->RDD conversion.
for x in df.collect():
    print(x)

import pandas as pd

# Small 2x3 demo frame with default integer row/column labels.
rows = [
    (1, 2, 3),
    (4, 5, 6),
]
data = pd.DataFrame(rows)
