/*
 * $AIST_Release: 0.9.0 $
 * Copyright 2011 Information Technology Research Institute, National
 * Institute of Advanced Industrial Science and Technology
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *    http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.scalatest.FunSuite

import scala.util.Random
import scala.collection.JavaConversions._
import scala.math.Ordering._

import java.lang.Iterable
import java.io.File
import java.io.FileOutputStream
import java.util.Arrays

import org.apache.hadoop.fs._
import org.apache.hadoop.io._
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.mapreduce.Mapper
import org.apache.hadoop.mapreduce.Reducer
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.util.GenericOptionsParser

import hadoop.PBTableInputFormat

object InputTest {
  type MapperType  = Mapper[BytesWritable, BytesWritable, BytesWritable, BytesWritable]
  type ReducerType = Reducer[BytesWritable, BytesWritable, NullWritable, NullWritable]

  // Shared accumulator: every (key, value) pair seen by the reducer is prepended
  // here and later compared against the generated input by the test suite.
  var outlist = List[(Array[Byte], Array[Byte])]()

  /** Identity mapper: forwards each (key, value) record unchanged. */
  class Map extends MapperType {
    override def map(key: BytesWritable, value: BytesWritable, output: MapperType#Context): Unit = {
      output.write(key, value)
    }
  }

  /** Reducer that records every (key, value) pair into `InputTest.outlist`. */
  class Reduce extends ReducerType {
    override def reduce(key: BytesWritable, values: Iterable[BytesWritable], context: ReducerType#Context): Unit = {
      // Lock on the companion object, not on this Reduce instance: `outlist`
      // lives on the object, and the framework may create several Reduce
      // instances, so a per-instance `synchronized` would not guard it.
      InputTest.synchronized {
        for (v <- values) {
          // BytesWritable reuses its backing buffer and it may be longer than
          // the valid data, so copy exactly getLength bytes before keeping a
          // reference past this call.
          val kb = Arrays.copyOf(key.getBytes, key.getLength)
          val vb = Arrays.copyOf(v.getBytes, v.getLength)
          outlist = (kb, vb) :: outlist
        }
      }
    }
  }
}

class PBTableInputFormatSuite extends FunSuite {

  val NRECORD = 500 // records generated per input file
  val NFILES = 10   // number of generated input files
  lazy val localFS = FileSystem.getLocal(new Configuration)

  test("PBTableInputFormat test") {

    // Fixed seed so the generated input (and any failure) is reproducible.
    val randGen = new Random(1)

    val workdir = new Path("temporary-for-test-UHEUNFDSN")

    localFS.mkdirs(workdir)
    if (!localFS.getFileStatus(workdir).isDir()) {
      fail("Can't create temporary input directory")
    }
    try {
      val inpath = new Path(workdir, "inputs")
      val outpath = new Path(workdir, "outputs")
      localFS.mkdirs(inpath)

      // Generate NFILES random PBTable files and keep the (key, value) pairs
      // that were written, to compare against what the job reads back.
      val values = (1 to NFILES).flatMap { i =>
        util.RandomPBTableMaker.mkFile(randGen, NRECORD, new Path(inpath, "file" + i))
      }

      val conf = new Configuration()
      new GenericOptionsParser(conf, Array("dummy-arg0"))
      // Run the job in-process (LocalJobRunner) so the test needs no cluster.
      conf.set("mapred.job.tracker", "local")

      val job = new Job(conf, "PBTableInputFormat Test")

      job.setMapperClass(classOf[InputTest.Map])
      // job.setCombinerClass(classOf[InputTest.Reduce])
      job.setReducerClass(classOf[InputTest.Reduce])
      job.setOutputKeyClass(classOf[BytesWritable])
      job.setOutputValueClass(classOf[BytesWritable])

      job.setInputFormatClass(classOf[PBTableInputFormat])
      // The reducer captures records itself; the job writes no real output.
      job.setOutputFormatClass(classOf[NullOutputFormat[NullWritable, NullWritable]])

      FileInputFormat.addInputPath(job, inpath)
      FileOutputFormat.setOutputPath(job, outpath)

      assert(job.waitForCompletion(true), "Job failed.")

      // Verify the input format delivered exactly one map-input record per
      // generated record.
      val COUNTER_GROUP = "org.apache.hadoop.mapred.Task$Counter"
      val ctrs = job.getCounters()
      val mapIn = ctrs.findCounter(COUNTER_GROUP, "MAP_INPUT_RECORDS").getValue()

      assert(mapIn == NFILES * NRECORD, "mapper record size is mismatch")

      // Lexicographic byte-array ordering with a length tie-break. The
      // tie-break matters: padding the shorter array with zeros (as the old
      // zipAll-based compare did) would treat arrays differing only in
      // trailing zero bytes as equal and could mask a real data mismatch.
      implicit object ArrayByte extends Ordering[Array[Byte]] {
        def compare(lhs: Array[Byte], rhs: Array[Byte]): Int = {
          lhs.zip(rhs)
            .map { case (l, r) => l compare r }
            .find(_ != 0)
            .getOrElse(lhs.length compare rhs.length)
        }
      }

      val o = Ordering.Iterable(Ordering.Tuple2(ArrayByte, ArrayByte))

      // Order both sides identically, then compare element-wise.
      val a = values.toList.sorted
      val b = InputTest.outlist.sorted

      assert((o.compare(a, b)) == 0, "data mismatch")

    } finally {
      // Always remove the temporary work directory, even on failure.
      localFS.delete(workdir, true)
    }
  }
}
