package com.dongline.te
import org.apache.spark.sql.{SQLContext, SparkSession}
import javafx.application.Application
import javafx.stage.Stage
import org.apache.spark.{SparkConf, SparkContext}

class SqlDemo extends Application {

  /**
   * JavaFX entry point running a small Spark SQL demo:
   *  1. reads "D://user.txt" (CSV lines: id,name,age) into an RDD,
   *  2. maps each line onto [[Person]] and converts to a DataFrame,
   *  3. writes the "age" column as JSON to "D://aaa",
   *  4. registers the temp view "t_person" and prints all rows.
   *
   * @param primaryStage supplied by the JavaFX runtime; unused here.
   */
  override def start(primaryStage: Stage): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("sql").setMaster("local")
    // Lower Spark's minimum-memory sanity check (~450 MB) so the demo
    // can run inside a small local JVM.
    conf.set("spark.testing.memory", "471859200")

    // SparkSession supersedes the deprecated SQLContext and owns the
    // underlying SparkContext; the file already imports it.
    val spark = SparkSession.builder().config(conf).getOrCreate()
    try {
      // Implicit conversions (e.g. rdd.toDF) must be imported only AFTER
      // the session has been created.
      import spark.implicits._

      val sc = spark.sparkContext
      val rdd1 = sc.textFile("D://user.txt").map(x => x.split(","))

      // Map each CSV row onto the Person case class so toDF() can infer the schema.
      // NOTE(review): assumes every line has at least 3 comma-separated fields
      // with numeric id/age — malformed lines will throw; confirm input format.
      val rdd2 = rdd1.map(x => Person(x(0).toLong, x(1).trim, x(2).toInt))

      val df = rdd2.toDF()

      // createOrReplaceTempView replaces the deprecated registerTempTable.
      df.createOrReplaceTempView("t_person")

      // Overwrite mode so re-running the demo does not fail because the
      // output directory already exists.
      df.select("age").write.mode("overwrite").json("D://aaa")

      // Query the temp view through the session.
      spark.sql("select * from t_person").show()
    } finally {
      // Always release Spark resources, even if the job above fails
      // (the original leaked the SparkContext).
      spark.stop()
    }
  }
}
/** Immutable row type for the demo's user records (id, name, age); its
  * fields define the DataFrame schema behind the "t_person" temp view. */
case class Person(
    id: Long,
    name: String,
    age: Int
)