from pyspark.sql import SparkSession
# Count the rows of a SQL Server table (MRTrace..Table_Current) by reading it
# through Spark's JDBC data source on a standalone cluster.
print("aaa")  # debug marker: script started
spark = SparkSession.builder.appName(
    'read_write').master('spark://192.168.8.161:7077').getOrCreate()

print("aaa")  # debug marker: Spark session created
try:
    # NOTE(review): SA credentials are hard-coded in source — move them to a
    # config file or environment variables before this leaves development.
    # "DatabaseName" is forwarded by Spark to the MS JDBC driver as a
    # connection property selecting the target database.
    jdbcDF = spark.read \
        .format("jdbc") \
        .option("url", "jdbc:sqlserver://10.160.12.192:1433") \
        .option("DatabaseName", "MRTrace") \
        .option("user", "sa") \
        .option("password", "Docimax@123") \
        .option("dbtable", "Table_Current") \
        .load()

    print(jdbcDF.count())
finally:
    # Always release the cluster's executors, even if the JDBC read or the
    # count fails — the original script leaked the session on any error.
    spark.stop()
