from pyspark.sql import SparkSession

# 创建SparkSession
# Create a SparkSession with the SQLite JDBC driver jar on the classpath.
# NOTE(review): "spark.jars" must be set before the session is created;
# it is ignored for an already-running session.
spark = SparkSession.builder \
    .appName("SQLite JDBC Example") \
    .config("spark.jars", "/usr/local/spark/jars/sqlite-connector-java/sqlite-jdbc.jar") \
    .getOrCreate()

try:
    # Read the LiveRoom table from the SQLite database via JDBC.
    jdbcDF = spark.read \
        .format("jdbc") \
        .option("url", "jdbc:sqlite:/home/hadoop/SparkDouyin/Douyin.db") \
        .option("driver", "org.sqlite.JDBC") \
        .option("dbtable", "LiveRoom") \
        .load()

    # Display the table contents (first 20 rows by default).
    jdbcDF.show()
finally:
    # Always release the SparkSession, even if the read or show() raises,
    # so the JVM backend and its resources are shut down cleanly.
    spark.stop()
