import os

from pyspark.sql import SparkSession
# Usage: spark-submit this_script.py  (requires S3 and TiDB network access)

if __name__ == '__main__':
    # One-shot ETL job: load a MySQL JSON export from S3 and append it into
    # the TiDB table midas_bill_data.t_so_dsp_rev over JDBC.
    spark = SparkSession.builder.appName('wlw_test').getOrCreate()

    # spark.read.json expects one JSON object per line (JSON Lines format) —
    # presumably what the upstream MySQL export produces; verify with the source.
    file_name = 's3://shareit.ads.ap-southeast-1/temp/wanglw/source/mysql_data.txt'
    df = spark.read.json(file_name)

    # SECURITY: production credentials were hard-coded here (and an older set
    # was left in commented-out code). They are now read from the environment,
    # falling back to the previous values so existing deployments keep working.
    # TODO: rotate the exposed password and remove the defaults entirely.
    properties = {
        'user': os.environ.get('MIDAS_DB_USER', 'ads_midas_rw'),
        'password': os.environ.get('MIDAS_DB_PASSWORD', 'KEaVh_7Dn92tC6iNzB8lX3'),
    }
    url = 'jdbc:mysql://prod.madis.cbs.sg1.tidb:4000/midas_bill_data?characterEncoding=UTF-8'

    try:
        # isolationLevel=NONE disables transactional isolation for the JDBC
        # writer — NOTE(review): presumably chosen for bulk-append throughput
        # on TiDB; confirm duplicates on partial failure are acceptable.
        df.write.mode('append') \
            .option("isolationLevel", "NONE") \
            .jdbc(url=url, properties=properties, table='t_so_dsp_rev')
    finally:
        # Release cluster resources even if the read or write fails.
        spark.stop()
