package Demo2

import java.util

import org.apache.spark.sql.types.{DataTypes, StructField}
import org.apache.spark.sql.{Row, RowFactory, SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by lenovo on 2017/11/16.
  */
object SparkSQL_ScalaDemo1 {

  /**
    * Demo: find students whose score is above 70 in an in-memory JSON
    * dataset, look up their ages in a JSON file, join the two result
    * sets by name, and write the combined (name, age, score) records
    * out as JSON.
    */
  def main(args: Array[String]): Unit = {
    System.setProperty("hadoop.home.dir", "E://hadoop-liyadong//hadoop-2.7.1")

    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("SparkSQL_ScalaDemo1")
      .set("spark.testing.memory", "2147480000")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Scores: build a DataFrame from an in-memory array of JSON strings.
    val arrayJson = Array("{'name':'Justin','score':99}", "{'name':'Andy','score':78}", "{'name':'Michael','score':60}")
    val jsonDF = sqlContext.read.json(sc.parallelize(arrayJson))
    jsonDF.registerTempTable("studentScore")

    // Names of the students scoring above 70 (driver-side Scala Array
    // instead of collectAsList + Java index loop).
    val goodNames = sqlContext
      .sql("select name from studentScore where score > 70")
      .collect()
      .map(_.getAs[String]("name"))
    goodNames.foreach(println)

    if (goodNames.isEmpty) {
      // Guard: "name in ()" below would be invalid SQL.
      println("No student scored above 70; nothing to write.")
    } else {
      // Ages: read the student file and keep only the good students.
      val studentDF = sqlContext.read.json("D://spark实训//people.json")
      studentDF.registerTempTable("studentName")
      // Build the IN clause with mkString instead of manual var/index bookkeeping.
      val sql1 = goodNames.map(n => s"'$n'").mkString(
        "select name,age from studentName where name in (", ",", ")")
      println(sql1)
      val goodStudent = sqlContext.sql(sql1)

      // Pair RDDs keyed by name: (name, age) and (name, score).
      // age/score are Long in the inferred JSON schema; stringify for output.
      val ageRDD = goodStudent.rdd.map(row =>
        (row.getAs[String]("name"), row.getAs[Long]("age").toString))
      val scoreRDD = jsonDF.rdd.map(row =>
        (row.getAs[String]("name"), row.getAs[Long]("score").toString))

      // join yields (name, (score, age)).
      val goodStudentRDD = scoreRDD.join(ageRDD)
      goodStudentRDD.foreach(println)

      // BUG FIX: the original emitted Row(name, score, age) against a
      // (name, age, score) schema, so the age and score columns were
      // swapped in the output. Emit the fields in schema order.
      val rowRDD = goodStudentRDD.map { case (name, (score, age)) =>
        RowFactory.create(name, age, score)
      }
      rowRDD.foreach(row => println(row.getAs(0)))

      val structType = DataTypes.createStructType(Array(
        DataTypes.createStructField("name", DataTypes.StringType, true),
        DataTypes.createStructField("age", DataTypes.StringType, true),
        DataTypes.createStructField("score", DataTypes.StringType, true)))
      val goodStudentDF = sqlContext.createDataFrame(rowRDD, structType)
      // BUG FIX: the original registered this table under an empty name ("").
      goodStudentDF.registerTempTable("goodStudentScore")
      goodStudentDF.write.format("json").mode(SaveMode.Overwrite).save("D://GoodStudentScala")
    }

    sc.stop() // release the local Spark context
  }
}
