package com.fanli.bigdata.mytest

import org.apache.spark._
import org.apache.spark.rdd.RDD
import scala.collection.mutable.ArrayBuffer
import org.apache.log4j.{Level, Logger}

object TestDemo {
  // Silence Spark's very chatty INFO logging when running locally.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Entry point: reads a local text file and prints each line paired with
   * itself, exercising `iteratorTest` through `RDD.mapPartitions`.
   *
   * Fixes vs. original: modern `: Unit =` signature instead of deprecated
   * procedure syntax, `val` instead of an unneeded `var`, and `sc.stop()`
   * in a `finally` so the SparkContext is released even on failure.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("MySpakDemo1").setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      // NOTE(review): hard-coded Windows path — assumes D:\log.txt exists; confirm for the target machine.
      val inputRDD = sc.textFile("file:///D:/log.txt")
      val log: RDD[(String, String)] = inputRDD.mapPartitions(iteratorTest(_))
      println(log.collect.mkString(","))
      //    val a = sc.parallelize(1 to 9, 3)
      //    println(a.mapPartitions(myfunc).collect.mkString(","))
    } finally {
      sc.stop() // release executor threads / UI port even if the job throws
    }
  }

  /**
   * Pairs each element with its successor: Iterator(1,2,3) yields the pairs
   * (1,2) and (2,3). Pairs are prepended with `::`, so — as in the original —
   * they come out in REVERSE order: (2,3), (1,2).
   *
   * Fix vs. original: the unguarded `iter.next` threw
   * `NoSuchElementException` on an empty iterator. Spark routinely hands
   * `mapPartitions` empty partitions, so that was a job-killing bug; an
   * empty input now yields an empty iterator.
   *
   * @param iter elements of one partition
   * @return adjacent-element pairs, last pair first
   */
  def myfunc[T](iter: Iterator[T]): Iterator[(T, T)] = {
    var res = List[(T, T)]()
    if (iter.hasNext) { // guard: empty partitions must not crash
      var pre = iter.next()
      while (iter.hasNext) {
        val cur = iter.next()
        res ::= ((pre, cur)) // prepend keeps the original reversed ordering
        pre = cur
      }
    }
    res.iterator
  }

  /**
   * Maps each element to the pair (elem, elem).
   *
   * Improvement vs. original: the `ArrayBuffer` version eagerly materialized
   * the whole partition in memory, defeating the streaming nature of
   * `mapPartitions`; `Iterator.map` is lazy and O(1) in memory while
   * producing the identical sequence of pairs.
   *
   * @param msgs elements of one partition
   * @return each element duplicated into a 2-tuple, in input order
   */
  def iteratorTest[T](msgs: Iterator[T]): Iterator[(T, T)] =
    msgs.map(msg => (msg, msg))

}
