package HadoopWithSpark.class1st

/**
 * @Author TheKernel
 * @Version 1.0
 * @Description 1.9 Arrays — demo of Scala array, buffer and list operations
 */
object MyArray {

  /**
   * Demo of Scala collections: fixed-length arrays, `map` in three
   * spellings, a word-count pipeline (`flatMap`/`groupBy`), mutable
   * `ArrayBuffer`/`ListBuffer`, and immutable `List` operations
   * (cons, concat, sort, group, fold, reduce, slice).
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Array: element values are mutable, length is fixed.
    val arr: Array[String] = new Array[String](3) // 3 is the length (demo only, unused below)
    val arr2 = Array(1, 2, 3, 4)

    // map: three equivalent spellings.
    // Array does not override toString, so print the contents via mkString —
    // a bare println(array) would show something like "[I@1b6d3586".
    val result = arr2.map((x: Int) => x * 10)
    println(result.mkString("Array(", ", ", ")"))
    val result2 = arr2.map(x => x * 10)
    println(result2.mkString("Array(", ", ", ")"))
    val result3 = arr2.map(_ * 10)
    println(result3.mkString("Array(", ", ", ")"))

    // Word count
    val words = Array("Hello Scala", "Hello Java", "Hello Python")
    val word = words.map(_.split("\\s")) // Array[Array[String]]
    println(word.map(_.toList).toBuffer) // inner arrays converted so they print readably
    val flattenWords = word.flatten      // flatten 2-D -> 1-D
    println(flattenWords.toBuffer)
    // The three steps above in one go:
    val result4 = words.flatMap(_.split("\\s")).toBuffer
    println(result4)
    val result5 = result4.groupBy(x => x)
    println(result5)
    // Count occurrences, sorted by descending count.
    // (mapValues is deprecated in Scala 2.13; map over the pairs instead.)
    println(result5.map { case (w, ws) => (w, ws.length) }.toList.sortBy(-_._2))
    println(result5.map(x => (x._1, x._2.length)).toList.sortBy(x => -x._2))

    // Mutable array
    import scala.collection.mutable.ArrayBuffer
    val ab = ArrayBuffer(1, 2, 3, 4)
    ab ++= Seq(5, 6, 7) // varargs += is deprecated in 2.13; ++= is the portable form
    ab.insert(0, -1)
    println("ab = " + ab)

    // Immutable list
    val lst = List(1, 2, 3, 4)
    println((lst.head, lst.tail)) // explicit tuple: auto-tupling of println args is deprecated
    val lst2 = 0 :: lst
    println("lst2=" + lst2)
    val lst3 = lst.::(0) // same as 0 :: lst, written as a method call
    println("lst3=" + lst3)
    val lst4 = 0 +: lst
    println("lst4=" + lst4)
    val lst5 = lst.+:(0)
    println("lst5=" + lst5)
    val lst6 = 1 :: 2 :: 3 :: 4 :: 5 :: Nil
    println("lst6=" + lst6)
    val lst7 = lst ++ lst6 // ++ concatenates two Lists
    println("lst7=" + lst7)
    // Built-in List methods
    println(lst7.count(x => x >= 3))
    println(lst7.filter(x => x >= 3))
    println(lst7.sorted.reverse)
    // sortBy takes a sort key per element; sortWith compares two elements
    val lst8 = List(-0.3, 7.0, 12.0, -5, 0.5, -5.8, 10.0)
    println(lst8.sortBy(x => -1.0 / x))
    val lst9 = List(("a", 0.25), ("b", 0.88), ("c", 0.17), ("d", 0.09))
    println(lst9.sortBy(i => i._2))
    // NOTE: the predicate must be a STRICT ordering (> not >=) — a
    // non-strict comparison violates sortWith's contract and can throw
    // "Comparison method violates its general contract" on larger lists.
    println(lst9.sortWith((x, y) => x._2 > y._2))
    println(lst8.grouped(2).toList) // grouped: fixed-size chunks of 2
    val lst10 = lst8.grouped(2).toList.flatten // undo the grouping
    println(lst10)
    println(lst8.fold(0.0)((x, y) => x + y)) // 0.0 is the initial value
    val lst11 = List(1, 2, 3, 4, 5)
    // foldLeft: ((((0-1)-2)-3)-4)-5 = -15
    println(lst11.foldLeft(0)((x, y) => x - y))
    // foldRight: 1-(2-(3-(4-(5-0)))) = 3
    println(lst11.foldRight(0)((x, y) => x - y))
    println(lst11.fold(0)(_ - _))
    println(lst11.foldRight(0)(_ - _))
    val lst12 = List(3, 5, 11, 8, 3)
    // println, not print: the original print ran this value (-24) together
    // with the next line's output on one line.
    println(lst12.reduce((x, y) => x - y))
    // aggregate is deprecated for sequential collections (it only matters
    // for parallel ones); foldLeft produces the identical result here.
    println(lst12.foldLeft(0)(_ + _))
    println(lst12.mkString("/"))  // mkString: like Python's "/".join
    println(lst4.slice(1, 5))     // slice: from-index inclusive, until-index exclusive

    // Mutable list
    import scala.collection.mutable.ListBuffer
    val lb = ListBuffer(1, 2, 3, 4, 5)
    lb ++= Seq(6, 7, 8)
    println("lb = " + lb)

  }

}
