import sys
import time
import pymysql
from pyspark.sql import HiveContext
from pyspark import SparkConf, SparkContext, SQLContext
from pyspark.sql import SparkSession

# MySQL JDBC connection settings for the iFinD_data database.
# NOTE(review): credentials are hard-coded here — move to a config file or
# environment variables before deploying.
get_df_url = "jdbc:mysql://192.168.200.200:53306/iFinD_data"
# Fix: the URL is a MySQL URL, so the driver must be the MySQL Connector/J
# class, not the Oracle driver that was here before.
get_df_driver = "com.mysql.cj.jdbc.Driver"
get_df_user = "qi0907"
get_df_password = "Xuf0gQQzwPtw0D#6"
if __name__ == '__main__':
    # Build a Hive-enabled SparkSession pointed at the cluster's warehouse
    # and metastore.
    spark = (
        SparkSession.builder
        .appName("inc_dd_openings")
        .config("spark.sql.warehouse.dir", "hdfs://hadoop001:8020/dataset/hive")
        .config("hive.metastore.uris", "thrift://hadoop001:9083")
        .enableHiveSupport()
        .getOrCreate()
    )

    # Read the `indicator_data` table from MySQL over JDBC.
    # Fix: the original wrapped the SparkSession in SQLContext(...), but
    # SQLContext expects a SparkContext and is deprecated; the session's own
    # `read` attribute is the supported entry point.
    # NOTE(review): credentials are hard-coded — move to config/env; also this
    # URL uses port 43306 while the module-level constant uses 53306 — confirm
    # which is intended.
    df = (
        spark.read.format("jdbc")
        .option("url", "jdbc:mysql://192.168.200.200:43306/iFinD_data?useUnicode=true&characterEncoding=utf-8")
        .option("driver", "com.mysql.cj.jdbc.Driver")
        .option("dbtable", "indicator_data")
        .option("user", "qi0907")
        .option("password", "Xuf0gQQzwPtw0D#6")
        .load()
    )
    df.show(5)

    # Release cluster resources cleanly (the original never stopped the session).
    spark.stop()
