/*
 * $AIST_Release: 0.9.0 $
 * Copyright 2011 Information Technology Research Institute, National
 * Institute of Advanced Industrial Science and Technology
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *    http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package hadoop

import java.io.DataOutputStream
import org.apache.hadoop.mapreduce.TaskAttemptContext
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.mapreduce.RecordWriter
import org.apache.hadoop.io.BytesWritable
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils
import com.google.protobuf.CodedOutputStream
import pbdata.Tables

/**
 * RecordWriter that serializes each (key, value) pair as a single
 * protobuf `Tables.KeyValue` message, written length-delimited (tag +
 * size + payload) onto the supplied stream via a CodedOutputStream.
 */
class PBTableRecordWriter(out: DataOutputStream) extends RecordWriter[BytesWritable, BytesWritable] {

  // Protobuf field number under which every KeyValue message is framed.
  val KEYVALUE_TAG = 1
  val codedOutput = CodedOutputStream.newInstance(out)

  /**
   * Serializes one key/value pair. Synchronized because CodedOutputStream
   * buffers internally and is not safe for concurrent writers.
   */
  override def write(key: BytesWritable, value: BytesWritable): Unit = synchronized {
    val record = Tables.KeyValue.newBuilder()
    record.setKey(Utils.copyToByteString(key))
    record.setValue(Utils.copyToByteString(value))
    codedOutput.writeMessage(KEYVALUE_TAG, record.build())
  }

  /** Flushes any bytes buffered in the coded stream, then closes the underlying stream. */
  override def close(job: TaskAttemptContext): Unit = {
    codedOutput.flush()
    out.close()
  }
}

/**
 * FileOutputFormat producing protobuf-framed key/value files, optionally
 * wrapped in a Hadoop compression codec (GzipCodec by default when
 * compression is enabled on the job).
 */
class PBTableOutputFormat extends FileOutputFormat[BytesWritable, BytesWritable] {

  /**
   * Builds a PBTableRecordWriter for this task attempt. When output
   * compression is enabled, the job's configured codec (falling back to
   * gzip) wraps the raw file stream and supplies the file extension.
   */
  def getRecordWriter(job: TaskAttemptContext): RecordWriter[BytesWritable, BytesWritable] = {
    val conf = job.getConfiguration()

    // Resolve the codec only when the job asked for compressed output.
    val maybeCodec: Option[CompressionCodec] =
      if (FileOutputFormat.getCompressOutput(job)) {
        val codecClass = FileOutputFormat.getOutputCompressorClass(job, classOf[GzipCodec])
        Some(ReflectionUtils.newInstance(codecClass, conf).asInstanceOf[CompressionCodec])
      } else {
        None
      }

    val extension = maybeCodec.fold("")(_.getDefaultExtension())
    val file = getDefaultWorkFile(job, extension)
    // `false`: do not overwrite an existing output file.
    val rawOut = file.getFileSystem(conf).create(file, false)

    val stream: DataOutputStream = maybeCodec match {
      case Some(codec) => new DataOutputStream(codec.createOutputStream(rawOut))
      case None        => rawOut
    }
    new PBTableRecordWriter(stream)
  }
}

