package com.cike.sparkstudy.sql.scala

import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Converts between an RDD and a DataFrame programmatically: the schema is
  * built at runtime with StructType/StructField instead of being inferred
  * from a case class by reflection.
  *
  * Expects a CSV file with rows of the form `id,name,age`
  * (e.g. `1,leo,17`) at /developerCodes/test/students.txt.
  */
object RDD2DataFrameProgrammatically {

  // A plain main() is used instead of `extends App`: App's delayed
  // initialization is a known pitfall for Spark drivers — fields may not be
  // initialized yet when closures referencing them are serialized to
  // executors.
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("RDD2DataFrameProgrammatically")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    try {
      // 1. Create a plain RDD[Row]. Split each line once, not three times
      //    per record as the original did.
      val lines = sc.textFile("/developerCodes/test/students.txt", 1)
      val studentRDD = lines.map { line =>
        val fields = line.split(",")
        Row(fields(0).toInt, fields(1), fields(2).toInt)
      }

      // 2. Build the schema programmatically (all columns nullable).
      val structType = StructType(Array(
        StructField("id", IntegerType, true),
        StructField("name", StringType, true),
        StructField("age", IntegerType, true)))

      // 3. Convert the RDD[Row] to a DataFrame using the explicit schema.
      val studentDF = sqlContext.createDataFrame(studentRDD, structType)

      // 4. Query it as a normal SQL table.
      studentDF.registerTempTable("students")
      val teenagerDF = sqlContext.sql("select * from students where age <= 17")

      // collect() returns Array[Row]. The original bound the Unit result of
      // foreach to a val misleadingly named `teenagerRDD`; just iterate.
      teenagerDF.rdd.collect().foreach(row => println(row))
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }
  }
}
