package sparksql

import org.apache.spark.sql.SparkSession

object DataFrameRDDApp {

  /** Entry point: builds a DataFrame from an RDD of "id,name,age" CSV lines,
   *  prints its schema, and demonstrates a self-join on `id`.
   *
   *  @param args optional; `args(0)` overrides the default input file path
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("DataFrameRDDApp")
      .master("local[2]")
      .getOrCreate()

    // Allow the input path to be supplied on the command line; fall back to
    // the original hard-coded location so existing no-arg runs are unchanged.
    val inputPath = args.headOption.getOrElse(
      "F:\\lzc\\SparkSQL\\sparksql\\src\\main\\scala\\sparksql\\infos.txt")

    // RDD ====> DataFrame
    val rdd = spark.sparkContext.textFile(inputPath)

    // Implicit conversions (e.g. .toDF on an RDD of case classes) need this import.
    import spark.implicits._

    // Parse one "id,name,age" CSV line into an Info record.
    // NOTE(review): assumes every line has exactly three well-formed fields;
    // a malformed line will throw (ArrayIndexOutOfBounds / NumberFormat).
    def parseLine(line: String): Info = {
      val fields = line.split(",")
      Info(fields(0).toInt, fields(1), fields(2).toInt)
    }

    val infoDF = rdd.map(parseLine).toDF()
    infoDF.printSchema()

    // DataFrame API style:
    // infoDF.filter(infoDF.col("age") >= 20).filter(infoDF.col("name") === "wangwu").show()

    // SQL style:
    // infoDF.createOrReplaceTempView("infos")
    // spark.sql("select * from infos where age >= 20 and name = 'wangwu'").show()

    // Build a second DataFrame from the same source and self-join on id.
    // NOTE(review): the joined result carries duplicate column names
    // (id, name, age from both sides) — fine for show(), but selecting a
    // column by bare name afterwards would be ambiguous.
    val infoDF2 = rdd.map(parseLine).toDF()
    infoDF.join(infoDF2, infoDF.col("id") === infoDF2.col("id")).show()

    spark.stop()
  }

  // Schema for one row of the input file: "id,name,age".
  final case class Info(id: Int, name: String, age: Int)
}
