package HadoopWithSpark.class1st

/**
 * @Author TheKernel
 * @Date 2019/9/25 2:27 PM
 * @Version 1.0
 * @Description 1.1 Data Types
 */
object DataType {

  def main(args: Array[String]): Unit = {
    // 7 numeric value types: Byte, Char, Short, Int, Long, Float, Double; plus Boolean and Unit (Scala's void, whose only value is written ())
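    // Illustrative sketch (not part of the original lesson): literals for the remaining value types listed above.
    val b: Byte = 1          // 8-bit signed integer
    val c: Char = 'A'        // 16-bit unsigned Unicode character
    val sh: Short = 100      // 16-bit signed integer
    val l: Long = 10000L     // 64-bit signed integer, 'L' suffix
    val f: Float = 3.14F     // 32-bit IEEE 754, 'F' suffix
    val d: Double = 3.14     // 64-bit IEEE 754 (default for decimal literals)
    val flag: Boolean = true
    println("b=" + b + " c=" + c + " sh=" + sh + " l=" + l + " f=" + f + " d=" + d + " flag=" + flag)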
    val name = "浙小商"  // val: immutable, like Java's final
    var age = 18  // var: mutable variable, type Int inferred
    val song: String = "凉凉"  // explicit type annotation
    println("name: " + name, "age: " + age, "song: " + song)  // auto-tupled into one Tuple3: (name: 浙小商,age: 18,song: 凉凉)
    println("name: " + name + " age: " + age + " song: " + song)  // name: 浙小商 age: 18 song: 凉凉

    // Unit-typed variable: println returns Unit (Scala's void)
    var s: Unit = println("Hello Scala")  // "Hello Scala" is printed here, at assignment time
    println("s=" + s)  // s holds the Unit value, printed as: s=()
  }

}
