import cn.doitedu.commons.util.SparkUtil
import com.alibaba.fastjson.JSON
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.DataFrame

/**
 * Bean-style record for one person row: id plus numeric attributes.
 * NOTE: a case class already extends `Product` (and `Serializable`), so the
 * explicit `extends Product` was redundant and has been dropped; `final`
 * added per case-class best practice.
 */
final case class Per(id: String, age: Int, height: Int, weight: Int, salary: Double)

/**
 * Demo: two ways to build a DataFrame from an RDD of CSV lines —
 * (1) via a case class (`Per`), (2) via a tuple. Column names for the
 * case-class DataFrame come from the field names; the tuple DataFrame
 * gets positional names (_1.._5).
 *
 * Expected input format per line: id,age,height,weight,salary
 * NOTE(review): no validation — a malformed line (wrong arity or
 * non-numeric field) will fail the job with NumberFormatException /
 * ArrayIndexOutOfBoundsException. Acceptable for a demo.
 */
object JavaBean2DF {
  def main(args: Array[String]): Unit = {

    val spark = SparkUtil.getSparkSession()
    val rdd: RDD[String] = spark.sparkContext.textFile("userprofile/data/demo/persons.txt")

    // Path 1: map each CSV line to a case-class instance.
    val beanRdd: RDD[Per] = rdd.map(line => {
      val arr: Array[String] = line.split(",")
      Per(arr(0), arr(1).toInt, arr(2).toInt, arr(3).toInt, arr(4).toDouble)
    })

    // BUG FIX: the original discarded this DataFrame, leaving the bean
    // half of the demo dead. Bind it and exercise it like the tuple path.
    val beanDF: DataFrame = spark.createDataFrame(beanRdd)
    beanDF.printSchema()
    beanDF.show(10, false)

    // Path 2: map each CSV line to a tuple of the same five fields.
    val tupleRdd: RDD[(String, Int, Int, Int, Double)] = rdd.map(line => {
      val arr: Array[String] = line.split(",")
      (arr(0), arr(1).toInt, arr(2).toInt, arr(3).toInt, arr(4).toDouble)
    })

    val tupleDF: DataFrame = spark.createDataFrame(tupleRdd)
    tupleDF.printSchema()
    tupleDF.show(10, false)

    spark.close()
  }
}
