package com.darrenchan.spark.sql

import org.apache.spark.sql.SparkSession

/**
  * RDD-to-DataFrame interoperability, approach 1: schema inference by
  * reflection over a case class.
  *
  * Expected input file content (id,name,age per line):
  * 1,zhangsan,20
  * 2,lisi,30
  * 3,wangwu,40
  */
object DataFrameRDDApp {

  /**
    * Entry point: reads a CSV-like text file into an RDD, converts it to a
    * DataFrame via the [[Info]] case class, and demonstrates two equivalent
    * query styles (DataFrame API and SQL over a temp view).
    *
    * @param args optional first argument: path of the input file
    *             (defaults to "infos.txt" for backward compatibility)
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[2]").appName("DataFrameRDDApp").getOrCreate()

    // Ensure the SparkSession is released even if the job below throws
    // (e.g. a malformed line causing NumberFormatException).
    try {
      // Generalized: input path may be supplied on the command line.
      val inputPath = args.headOption.getOrElse("infos.txt")

      // RDD ==> DataFrame
      val rdd = spark.sparkContext.textFile(inputPath)

      // NOTE: the implicit conversions are required for rdd.toDF()
      import spark.implicits._

      // trim guards against stray whitespace around fields before parsing
      val infoDF = rdd
        .map(_.split(","))
        .map(fields => Info(fields(0).trim.toInt, fields(1).trim, fields(2).trim.toInt))
        .toDF()

      infoDF.show()

      // 1. Query style one: the DataFrame API
      infoDF.filter(infoDF.col("age") > 20).show()

      // 2. Query style two: SQL against a registered temp view
      infoDF.createOrReplaceTempView("infos") // register the view name
      spark.sql("select * from infos where age > 20").show()
    } finally {
      spark.stop()
    }
  }

  /** One person record parsed from the input file; drives schema inference. */
  final case class Info(id: Int, name: String, age: Int)
}
