package Demo1

import org.apache.spark.{SparkConf, SparkContext}
import spire.std.map

/**
  * Created by Administrator on 2017/2/9 0009.
  */
object test1 {

  /**
    * Minimal Spark smoke test: constructs a local SparkContext and shuts it
    * down cleanly. Job logic (RDD/map experiments) goes between creation and
    * the `finally` block.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // BUG FIX: master URL was "local[*}" (mismatched brace '}' instead of ']'),
    // which Spark rejects at context creation ("Could not parse Master URL").
    // "local[*]" runs Spark locally with as many worker threads as cores.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("test1")
      // Raises the memory floor Spark checks at startup so context creation
      // does not fail with "System memory ... must be at least ..." on a
      // small local JVM heap. Value is ~2 GiB in bytes.
      .set("spark.testing.memory", "2147480000")

    val sc = new SparkContext(conf)
    try {
      // Spark job logic goes here.
      // e.g. val list = map((2,4) -> 5, (2,3) -> 6, (3,1) -> 2)
    } finally {
      // Always release executors, shuffle files, and the web-UI port,
      // even if the job body throws.
      sc.stop()
    }
  }
}
