package cn.lecosa.spark.mianshi

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import scala.collection.mutable.ArrayBuffer
import scala.util.Random
import org.apache.spark.sql.SQLContext

// Immutable record parsed from one CSV line of the input file: three integer
// fields. Presumably start/end are trip or segment markers and price is the
// fare — TODO(review): confirm field semantics against the data file.
// NOTE: the original `extends java.io.Serializable` was removed — Scala case
// classes are already serializable, so the clause was redundant.
case class Car(start: Int, end: Int, price: Int)

object Car {

  /** Entry point: reads comma-separated records ("start,end,price") from a
    * local file, maps each line to a [[Car]], converts the result to a
    * DataFrame, and prints both the DataFrame and the raw lines to stdout.
    *
    * Runs with a local 2-thread master; intended for local experimentation.
    *
    * @param args unused
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[2]").setAppName("CarApp")
    // Force the local filesystem so Hadoop does not try to resolve HDFS.
    conf.set("fs.defaultFS", "file:///")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    // BUG FIX: the original URI was "file///D:/..." — the colon after the
    // scheme was missing, so the path was not a valid "file:///" URI.
    val lines = sc.textFile("file:///D:/spark/workspace/bigdata/data/car", 3)

    // Each line is "start,end,price"; malformed lines (wrong arity or
    // non-numeric fields) will throw at action time — acceptable for this
    // local demo, but worth validating for real input.
    val df1 = lines
      .map(_.split(","))
      .map(attr => Car(attr(0).toInt, attr(1).toInt, attr(2).toInt))
      .toDF()
    df1.show()

    // Notes kept from the original author (translated):
    //   - cache and persist
    //   - the site (configuration) file needs to be changed
    lines.foreach { println }
    // println(rdd.collect().toBuffer)

    sc.stop()
  }
}