from pyspark.sql import SparkSession


def main() -> None:
    """Join sales records with product metadata and print per-product totals.

    Reads two CSV files from the working directory:
      - sales_data.csv   — must contain "Product" and "Quantity" columns
      - product_info.csv — must contain "Product" and "Category" columns
    Inner-joins them on "Product", sums Quantity per (Product, Category),
    and shows the result. Products with no sales rows are excluded by the
    inner join (original behavior, preserved).
    """
    spark = SparkSession.builder.appName("ProductInfoJoin").getOrCreate()
    try:
        # Load sales_data.csv and register it as a temporary view.
        sales_data_df = spark.read.csv("sales_data.csv", header=True, inferSchema=True)
        sales_data_df.createOrReplaceTempView("sales_data")

        # Load product_info.csv and register it as a temporary view.
        product_info_df = spark.read.csv("product_info.csv", header=True, inferSchema=True)
        product_info_df.createOrReplaceTempView("product_info")

        # Join the two views and aggregate quantities per product/category.
        result_df = spark.sql("""  
    SELECT   
        pi.Product,   
        pi.Category,   
        SUM(sd.Quantity) AS TotalQuantity  
    FROM   
        sales_data sd  
    JOIN   
        product_info pi ON sd.Product = pi.Product  
    GROUP BY   
        pi.Product, pi.Category  
""")
        result_df.show()
    finally:
        # Always release the Spark session, even if a read/query step fails;
        # the original script leaked the session on any exception.
        spark.stop()


if __name__ == "__main__":
    main()