package org.zjt.book

import java.util.Properties

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{Row, SQLContext, SaveMode}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

/**
  * DESC    Demo: builds a Spark SQL schema from a "name:type" string and
  *         converts an RDD of Maps into an RDD of Rows.
  *         (Original note: `Any` is the parent type of all Scala types.)
  *
  * @author
  * @create 2017-04-25 下午4:46
  **/
object Value {

  /**
    * Entry point: parallelizes three sample records (Maps), derives the field
    * list from `schemaString`, and projects each Map into a [[Row]] whose cells
    * are the stringified values for those fields (empty string when absent).
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("RDDToMysql").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Kept for follow-up DataFrame work (see commented createDataFrame below).
      val sqlContext = new SQLContext(sc)

      // Sample records; note `other -> null` — null values must be tolerated.
      val map1 = Map("id" -> 1, "name" -> "1", "password" -> "123qwe", "address" -> "www.1", "age" -> 10, "level" -> 3, "loginId" -> "san.zhang", "mail" -> "san.zhang@qq.com", "sex" -> "男", "other" -> null)
      val map2 = Map("id" -> 2, "name" -> "2", "password" -> "123qwe", "address" -> "www.3", "age" -> 30, "level" -> 2, "loginId" -> "wen.wang", "mail" -> "wen.wang@qq.com", "sex" -> "女")
      val map3 = Map("id" -> 3, "name" -> "3", "password" -> "123qwe", "address" -> "www.2", "age" -> 20, "level" -> 3, "loginId" -> "si.li", "mail" -> "si.li@qq.com", "sex" -> "男", "other" -> null)
      val data = sc.parallelize(List(map1, map2, map3))

      // "name:type" pairs describing the subset of keys projected into each Row.
      val schemaString = "id:INTEGER,name:String,age:INTEGER"
      // Parse ONCE outside the RDD transformation — the original re-split the
      // schema string inside the map closure for every record.
      val fieldNames = schemaString.split(",").map(_.split(":")(0))

      // Every cell below is stringified, so each column is declared StringType
      // even though schemaString mentions INTEGER; declaring IntegerType here
      // would mismatch the String cell values at DataFrame-creation time.
      val schema = StructType(fieldNames.map(StructField(_, StringType)))

      // BUG FIX: the original called Row.apply(array), which (via varargs)
      // yields a Row with a SINGLE cell containing the whole array.
      // Row.fromSeq spreads the values into one cell per field.
      val rowRDD = data.map { record =>
        Row.fromSeq(
          fieldNames.map { key =>
            record.get(key) match {
              case Some(v) if v != null => v.toString
              case _ => "" // absent key, or present-but-null value
            }
          }.toSeq
        )
      }

      // Next step (intentionally left disabled, as in the original):
      // val df = sqlContext.createDataFrame(rowRDD, schema)
    } finally {
      // Always release the SparkContext, even if the job above throws.
      sc.stop()
    }
  }
}
