/*
 * $AIST_Release: 0.9.0 $
 * Copyright 2011 Information Technology Research Institute, National
 * Institute of Advanced Industrial Science and Technology
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *    http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.scalatest.FunSuite

import scala.util.Random

import java.lang.Iterable

import org.apache.hadoop.fs._
import org.apache.hadoop.io._
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.mapreduce.Mapper
import org.apache.hadoop.mapreduce.Reducer
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.util.GenericOptionsParser

import hadoop.PBTableOutputFormat

/**
 * Shared state and MapReduce classes for the output-format test.
 *
 * The test driver fills `outlist` with the expected records; the reduce
 * phase then writes them through PBTableOutputFormat exactly once so the
 * produced file can be compared byte-for-byte against the reference file.
 */
object OutputTest {
  type MapperType  = Mapper[LongWritable, Text, NullWritable, NullWritable]
  type ReducerType = Reducer[NullWritable, NullWritable, BytesWritable, BytesWritable]

  // Key/value byte pairs the reducer must emit; populated by the test driver
  // before the job starts.
  var outlist = List[(Array[Byte], Array[Byte])]()
  // One-shot guard: ensures `outlist` is emitted at most once even if
  // reduce() is invoked more than once.
  var written = false

  /** Discards its input line; exists only to drive records into the reduce phase. */
  class Map extends MapperType {
    override def map(key: LongWritable, value: Text, output: MapperType#Context): Unit = {
      output.write(NullWritable.get, NullWritable.get)
    }
  }

  /**
   * Emits every pair in `outlist` exactly once, regardless of how many times
   * reduce() is called.
   */
  class Reduce extends ReducerType {
    override def reduce(key: NullWritable, values: Iterable[NullWritable], context: ReducerType#Context): Unit = {
      // Synchronize on the shared OutputTest object (not `this`): the guarded
      // state (`written`, `outlist`) lives on the object, and each reduce task
      // gets its own Reducer instance, so locking `this` protected nothing.
      OutputTest.synchronized {
        if (!written) {
          for ((k, v) <- outlist) {
            context.write(new BytesWritable(k), new BytesWritable(v))
          }
          // BUG FIX: the flag was never set, so the "write only once" guard
          // was ineffective and a second invocation would have duplicated
          // every output record.
          written = true
        }
      }
    }
  }
}

/**
 * End-to-end test of PBTableOutputFormat: runs a local MapReduce job whose
 * reducer writes a known record list through the output format, then checks
 * the reduce-output counter and compares the produced file byte-for-byte
 * against the reference file built by RandomPBTableMaker.
 */
class PBTableOutputFormatSuite extends FunSuite {

  // Number of records in the generated reference file.
  val NRECORD = 5000
  // Number of input files (single local reduce task → single output part).
  val NFILES = 1
  lazy val localFS = FileSystem.getLocal(new Configuration)

  test("PBTableOutputFormat test") {

    val randGen = new Random(1)
    val workdir = new Path("temporary-for-out-test-UHEUNFDSN")

    // Fail fast if the scratch directory could not be created.
    val created = localFS.mkdirs(workdir)
    if (!created || !localFS.getFileStatus(workdir).isDir()) {
      fail("Can't create temporary input directory")
    }
    try {
      val inpath = new Path("on_hadoop/test_inputs")
      val outpath = new Path(workdir, "outputs")

      val pbfile = new Path(workdir, "pbfiles/file")
      val outfile = new Path(outpath, "part-r-00000")
      val values = util.RandomPBTableMaker.mkFile(randGen, NRECORD, pbfile)
      // Hand the expected records to the reducer via shared state.
      OutputTest.outlist = values

      val conf = new Configuration()
      new GenericOptionsParser(conf, Array("dummy-arg0"))
      // Run the job in-process so no cluster is required.
      conf.set("mapred.job.tracker", "local")

      // FIX: job name previously said "PBTableInputFormat Test" (copy-paste
      // from the input-format suite); this suite tests the output format.
      val job = new Job(conf, "PBTableOutputFormat Test")

      job.setMapperClass(classOf[OutputTest.Map])
      job.setReducerClass(classOf[OutputTest.Reduce])
      job.setOutputKeyClass(classOf[NullWritable])
      job.setOutputValueClass(classOf[NullWritable])

      job.setInputFormatClass(classOf[TextInputFormat])
      job.setOutputFormatClass(classOf[PBTableOutputFormat])

      FileInputFormat.addInputPath(job, inpath)
      FileOutputFormat.setOutputPath(job, outpath)

      assert(job.waitForCompletion(true), "Job failed.")

      // FIX: use the constant instead of duplicating the group-name literal.
      val COUNTER_GROUP = "org.apache.hadoop.mapred.Task$Counter"
      val ctrs = job.getCounters()
      val redOut = ctrs.findCounter(COUNTER_GROUP, "REDUCE_OUTPUT_RECORDS").getValue()

      assert(redOut == NFILES * NRECORD, "Reduce record size is mismatch")
      assert(diff(pbfile.toString, outfile.toString), "data mismatch")

    } finally {
      // Always remove the scratch directory, even on failure.
      localFS.delete(workdir, true)
    }
  }


  /**
   * Byte-for-byte comparison of two files.
   *
   * Returns true iff both files have identical contents (including identical
   * length: reading past EOF yields -1, so a length mismatch is detected as
   * a byte mismatch on the shorter file's EOF).
   *
   * FIX: the original leaked both FileInputStreams — they were never closed,
   * and the early `return false` path exited without any cleanup. Streams
   * are now closed in finally blocks, and the nonlocal return is replaced
   * by a loop flag.
   */
  def diff(fileA: String, fileB: String): Boolean = {
    import java.io.FileInputStream
    val isA = new FileInputStream(fileA)
    try {
      val isB = new FileInputStream(fileB)
      try {
        var same = true
        var a = 0
        while (same && a >= 0) {
          a = isA.read
          if (a != isB.read) {
            same = false
          }
        }
        same
      } finally {
        isB.close()
      }
    } finally {
      isA.close()
    }
  }

}
