
from pyspark.sql import SparkSession

# Build a local Spark session wired to the remote Hive metastore
# (thrift://master:9083) so Hive tables are queryable via spark.sql.
builder = (
    SparkSession.builder
    .appName("CarSalesPrediction")
    .master("local")
    .enableHiveSupport()
)
builder = builder.config("hive.metastore.uris", "thrift://master:9083")
spark = builder.getOrCreate()

# Load the cleaned per-city car sales data out of Hive.
df = spark.sql("SELECT * FROM car_info_city_clean")

# Export the Hive result set into MySQL over JDBC.
# mode("overwrite") drops and recreates `car_info_city` on every run.
# NOTE(review): credentials are hardcoded in plaintext — move the user/password
# into environment variables or a secrets store before sharing this script.
df.write \
    .format("jdbc") \
    .option("url", "jdbc:mysql://localhost:3306/car_sales") \
    .option("dbtable", "car_info_city") \
    .option("user", "root") \
    .option("password", "123456") \
    .mode("overwrite") \
    .save()

# Release the Spark context (executors, UI port, metastore connection)
# instead of leaking it until JVM teardown.
spark.stop()
