package com.etc.datefrme

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Try

/** Immutable record for one whitespace-separated "name age" input line.
  *
  * Case-class parameters are `val`s by default, so the explicit `val`
  * modifiers were redundant; `final` prevents subclassing a case class.
  *
  * @param name the person's name (first token of the line)
  * @param age  the person's age, parsed from the second token
  */
final case class Person(name: String, age: Int)


object sparksql {

  /** Spark driver: reads "name age" lines from a text file, builds a
    * DataFrame of [[Person]], registers it as temp view "person", and
    * prints the result of `select * from person`.
    *
    * @param args optional first argument overrides the input path
    *             (defaults to the original hard-coded "e:\\a.txt")
    */
  def main(args: Array[String]): Unit = {
    // Generalized: path is now configurable; the default preserves old behavior.
    val inputPath = args.headOption.getOrElse("e:\\a.txt")

    // SparkSession is the Spark 2.x+ entry point; SQLContext is deprecated.
    // The file already uses Spark 2.x API (createOrReplaceTempView).
    val spark = SparkSession.builder()
      .appName("hello")
      .master("local")
      .getOrCreate()

    try {
      // Needed for the RDD -> DataFrame .toDF conversion below.
      import spark.implicits._

      val lines = spark.sparkContext.textFile(inputPath)

      // Skip blank or malformed lines (missing fields, non-numeric age)
      // instead of failing the whole job with an exception.
      val personRdd: RDD[Person] = lines.flatMap { line =>
        line.split(" ") match {
          case Array(name, age, _*) => Try(Person(name, age.toInt)).toOption
          case _                    => None
        }
      }

      val personDf: DataFrame = personRdd.toDF()
      personDf.createOrReplaceTempView("person")

      val frame: DataFrame = spark.sql("select * from person")
      frame.show()
    } finally {
      // Always release the driver's resources, even if the job throws.
      spark.stop()
    }
  }
}
