from pyspark.sql import SparkSession
from pyspark.sql.types import Row

'RDD转换到DataFrame'

# Example: converting an RDD into a DataFrame via Row objects.
# (The bare string above is a leftover author note meaning "RDD to DataFrame";
# it is a no-op expression statement, not a module docstring.)
spark = SparkSession.builder.getOrCreate()
sc = spark.sparkContext
# NOTE(review): "file:////" carries an extra slash — "file:///E:/..." is the
# conventional local-file URI form on Windows; confirm this path resolves.
loan_info = sc.textFile("file:////E:/PyCharm/PythonFolder/json/spark_sql_test2.txt")
# Split each comma-separated line into a list of fields: [product, price]
loan_info_list = loan_info.map(lambda x: x.split(','))


# Helper that converts one split record into a column-name -> value dict,
# later unpacked into a Row to give the DataFrame its schema.
def f(x):
    """Map a [product, price] field list to a {"product", "price"} dict."""
    return {"product": x[0], "price": x[1]}


# Build the DataFrame: each record becomes a Row whose keyword arguments
# come from f(), defining the columns "product" and "price".
loan_info_df = loan_info_list.map(lambda x: Row(**f(x))).toDF()
# spark.sql() can only query registered tables/views, so the DataFrame must
# be exposed under a temporary view name first.
loan_info_df.createOrReplaceTempView("ProInfo")
rows = spark.sql("select * from ProInfo")
# Format each row for readable output. Access Row fields by NAME rather than
# position: Row(**kwargs) sorted its fields alphabetically in Spark < 3.0,
# so positional indexing (x[0]/x[1]) could silently swap product and price
# depending on the Spark version.
result = rows.rdd.map(lambda x: "product:" + x["product"] + ",price:" + x["price"])

result.foreach(print)
