package com.imooc.scala

import org.apache.hadoop.hdfs.DFSClient.Conf

object HelloWorld {

  /** Entry point. Demonstrates basic Scala constructs; the live code
    * computes and prints the sum of 1..100 (i.e. 5050).
    */
  def main(args: Array[String]): Unit = {
//    println("hello,this is my scala")
//    sayName("is myslq")
//    loadConf()
//    loadConf("spark-product.conf")
//      println(speed(100,10))
//      println(speed(time = 10,distance = 100))
//      println(sum(2,3))
//      println(sum2(1,23,3,4))
//      println(sum2(1,23,3,3,3,3))
//      println(sum2(1,23,3,4,2,3,4,5))
//      println(1 to 10) // inclusive on both ends
//      println(1.to(10))
//      println(Range(1,10))  // start inclusive, end exclusive
//      println(Range(1,9,2))  // start inclusive, end exclusive
//      println(1 until 10)
//      println(1.until(10))
//
//    for(i <- 1 to 10 if i % 2 == 0){
//      println(i)
//    }
//    val courses = Array("hadoop","spark","streaming")
//    for(course <- courses){
//      println(courses)  // NOTE(review): prints the whole array; `course` was intended
//    }
//
//    courses.foreach(course => println(course))

    // Idiomatic replacement for the original var/while accumulation:
    // a Range is a collection, so summing 1..100 is just `.sum`.
    val sum = (1 to 100).sum
    println(sum)
  }

  /** Prints the given name. Demonstrates a default parameter value.
    *
    * @param name name to print; defaults to "PK"
    */
  def sayName(name: String = "PK"): Unit = {
    println(name)
  }

  /** Prints the given configuration file name. Demonstrates a default
    * parameter value.
    *
    * @param conf config file name; defaults to "spark-default.conf"
    */
  def loadConf(conf: String = "spark-default.conf"): Unit = {
    println(conf)
  }

  // Named parameters demo: callers may invoke as speed(time = 10, distance = 100).
  /** Average speed.
    *
    * @param distance distance travelled
    * @param time     time taken (no zero-check: time == 0 yields Float infinity/NaN)
    * @return distance / time
    */
  def speed(distance: Float, time: Float): Float = {
    distance / time
  }

  // Varargs demo below (sum2).
  /** Sum of two ints. Explicit return type added for a public member. */
  def sum(a: Int, b: Int): Int = a + b

  /** Sum of any number of ints; returns 0 for no arguments.
    * Replaces the original var/for accumulation with the stdlib `.sum`.
    */
  def sum2(numbers: Int*): Int = numbers.sum

  val x = 1
  // NOTE(review): this if-expression's value is discarded during object
  // initialization — it has no runtime effect and exists only as a demo
  // that `if` is an expression. Kept as-is; consider removing.
  if (x > 0) true else false
// prefer `1 to 10` when the loop range grows


}



