package sparkSql

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import sparkSql.HelloSparkSql.personObj

object SparkFile {

  /** Schema for one record parsed from the input text file: name,age,height. */
  case class personObj(name: String, age: Int, height: Int)

  /**
   * Entry point: reads a comma-separated text file of person records,
   * maps each line to a [[personObj]], converts the RDD to a DataFrame
   * and prints it to stdout.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[*]").setAppName("sss")
    val sc = new SparkContext(conf)
    val sqlCon = new SQLContext(sc)
    sc.setLogLevel("error")

    try {
      // NOTE(review): hard-coded Windows path — consider taking it from args.
      val fileRDD = sc.textFile("D:\\15code\\new.txt")

      // Parse each CSV line; trim fields so stray whitespace around the
      // commas does not make the Int conversions throw.
      val personRDD = fileRDD.map { line =>
        val s = line.split(",")
        personObj(s(0).trim, s(1).trim.toInt, s(2).trim.toInt)
      }

      // Brings in the RDD -> DataFrame conversion (toDF).
      import sqlCon.implicits._
      val personDf = personRDD.toDF
      personDf.show()

      /*
      // NOTE(review): original commented-out code registered the table as
      // "persoon" but queried "person", and had a stray trailing character;
      // fixed here so it works if re-enabled. registerTempTable is deprecated —
      // prefer createOrReplaceTempView on newer Spark versions.
      personDf.registerTempTable("person")
      sqlCon.sql("select * from person where age >= 20").show()
      personDf.write.save("") // binary (Parquet) output — not human-readable
      personDf.write.json("") // plain-text JSON output
      */
    } finally {
      // Always release Spark resources, even if parsing/reading fails.
      sc.stop()
    }
  }
}
