/*
 * $AIST_Release: 0.9.0 $
 * Copyright 2011 Information Technology Research Institute, National
 * Institute of Advanced Industrial Science and Technology
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *    http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package hadoop

import java.io.InputStream
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.InputSplit
import org.apache.hadoop.mapreduce.JobContext
import org.apache.hadoop.mapreduce.TaskAttemptContext
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.RecordReader
import org.apache.hadoop.io.BytesWritable
import com.google.protobuf.CodedInputStream
import com.google.protobuf.CodedOutputStream
import org.apache.hadoop.mapreduce.lib.input.FileSplit
import pbdata.Tables

/**
 * RecordReader that deserializes a stream of protobuf `Tables.KeyValue`
 * messages (length-delimited, field tag 1) from a file split and exposes
 * each message's key/value bytes as Hadoop [[BytesWritable]] pairs.
 *
 * Mutable state is required here because the Hadoop RecordReader contract
 * is inherently stateful (initialize / nextKeyValue / close lifecycle).
 */
class PBTableRecordReader extends RecordReader[BytesWritable, BytesWritable] {

  var codedInput: CodedInputStream = null
  var input: InputStream = null
  var currentKey: BytesWritable = new BytesWritable()
  var currentValue: BytesWritable = new BytesWritable()
  var length: Long = 0 // total byte length of the split
  var nRead: Long = 0  // bytes consumed so far, used for progress reporting

  override def getCurrentKey(): BytesWritable = currentKey
  override def getCurrentValue(): BytesWritable = currentValue

  /**
   * Advances to the next KeyValue message.
   *
   * @return false at end of stream (tag 0), true if a record was read into
   *         currentKey/currentValue.
   */
  override def nextKeyValue(): Boolean = {
    val tag = codedInput.readTag()
    if (tag == 0) {
      false // EOF
    } else {
      val builder = Tables.KeyValue.newBuilder()
      // NOTE(review): passing a null ExtensionRegistry; works with the
      // protobuf version this was written against — confirm before upgrading.
      codedInput.readMessage(builder, null)
      // Reset the size counter so CodedInputStream's per-message size limit
      // is not hit on long streams.
      codedInput.resetSizeCounter()

      val keyVal = builder.build()

      // Track consumed bytes (tag + length prefix + payload for field 1).
      nRead += CodedOutputStream.computeMessageSize(1, keyVal)

      currentKey   = new BytesWritable(keyVal.getKey().toByteArray())
      currentValue = new BytesWritable(keyVal.getValue().toByteArray())

      true
    }
  }

  /** Opens the split's file and wraps it in a CodedInputStream. */
  override def initialize(genericSplit: InputSplit, context: TaskAttemptContext): Unit = {
    val split = genericSplit.asInstanceOf[FileSplit]
    val job = context.getConfiguration()
    val file = split.getPath()
    val fs = file.getFileSystem(job)

    input = fs.open(file)
    codedInput = CodedInputStream.newInstance(input)
    length = split.getLength()
  }

  /**
   * Fraction of the split consumed, in [0.0, 1.0].
   *
   * Bug fix: the original returned 0.0f when `length == nRead`, i.e. it
   * reported zero progress exactly when the split was fully read. The guard
   * is meant to protect against division by zero on an empty split.
   */
  override def getProgress(): Float = {
    if (length == 0L) 1.0f // empty split: nothing left to do
    else math.min(1.0f, nRead.toFloat / length.toFloat)
  }

  /** Closes the underlying stream if it was opened. */
  override def close(): Unit = {
    if (input != null) {
      input.close()
    }
  }
}

/**
 * InputFormat for protobuf-table files: hands out a [[PBTableRecordReader]]
 * per split and disables splitting, since the length-delimited protobuf
 * stream has no record boundaries a splitter could find.
 */
class PBTableInputFormat extends FileInputFormat[BytesWritable, BytesWritable] {

  override def createRecordReader(
      split: InputSplit,
      context: TaskAttemptContext
  ): RecordReader[BytesWritable, BytesWritable] =
    new PBTableRecordReader()

  /** Files must be read whole — never split them. */
  override def isSplitable(context: JobContext, file: Path): Boolean = false
}

