package spark.work

//import breeze.linalg.{max, sum}
//import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import org.apache.spark.sql.{Column, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}
import util.RandomUtils

import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
//import org.apache.spark.sql.functions._

/**
  * Created by liuwei on 2017/11/8.
  */
object SplitData {

  /**
    * Builds a small in-memory student-score DataFrame, prints its schema, then
    * serializes it into a single JSON object with two fields:
    *   - "title": array of { "name", "type" } objects, one per schema column
    *   - "res":   array of row objects (one JSON object per DataFrame row)
    * and prints that JSON object.
    */
  def main(args: Array[String]): Unit = {
    // local[8]: run with 8 worker threads on this machine.
    val sparkConf = new SparkConf().setAppName("RowToColumnTest").setMaster("local[8]")
    // The builder owns the underlying SparkContext, so a separate
    // `new SparkContext(sparkConf)` is unnecessary (and risks a
    // "only one SparkContext" conflict). Pass the conf explicitly —
    // otherwise getOrCreate silently ignores appName/master.
    val ss = SparkSession.builder.config(sparkConf).getOrCreate()

    // Columns: name, gender, subject, score, rank, proctor (kept verbatim —
    // these strings are part of the produced schema/JSON output).
    val df = ss.createDataFrame(Seq(
      ("张三", "女", "数学", 60, 15, "张老师"),
      ("张三", "女", "语文", 70, 12, "李老师"),
      ("张三", "女", "语文", 70, 12, "李老师"),
      ("张三", "女", "物理", 50, 22, "张老师"),
      ("王二", "男", "数学", 50, 16, "赵老师"),
      ("王二", "男", "语文", 60, 20, "孙老师"),
      ("王二", "男", "历史", 50, 22, "李老师")
    )).toDF("姓名", "性别", "科目", "成绩", "排名", "监考老师")

    println(df.schema)

    // Column metadata: one { name, type } entry per field of the schema.
    val titleJson = new JSONArray
    df.schema.foreach { structField =>
      val title = new JSONObject
      title.put("name", structField.name)
      title.put("type", structField.dataType.typeName)
      titleJson.add(title)
    }

    // Row data: each Row rendered as a JSON string by Spark, then parsed
    // back into a fastjson object so it nests cleanly inside the result.
    val jsonArray = new JSONArray
    df.toJSON.collectAsList().asScala.foreach { json =>
      jsonArray.add(JSON.parseObject(json))
    }

    val result = new JSONObject()
    result.put("title", titleJson)
    result.put("res", jsonArray)
    println(result)

    // Release the local Spark threads and listener bus.
    ss.stop()
  }

}
