/*
 * $AIST_Release: 0.9.0 $
 * Copyright 2011 Information Technology Research Institute, National
 * Institute of Advanced Industrial Science and Technology
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *    http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package hadoop

import util.JavaTypes.JIterable
import scala.collection.JavaConversions._
import org.apache.hadoop.io.BytesWritable
import aggr.Aggregators.Combiner
import aggr.Serializer.serialize
import aggr.Deserializer.deserialize

/**
 * Combine-phase reducer: pre-aggregates the values sharing a key on the
 * map side before they are shuffled to the reduce phase.
 *
 * Each serialized value is decoded, paired with the decoded key, fed to
 * the configured [[Combiner]] aggregator, and every partially-combined
 * result is re-serialized and written back out as `BytesWritable`.
 */
class SCCombiner extends SCReducerBase[BytesWritable, BytesWritable] {

  /**
   * Pre-aggregates all values associated with `key`.
   *
   * NOTE: the pairing below is a lazy iterator, so each raw value is
   * deserialized only when the combiner actually consumes it — this also
   * keeps Hadoop's reuse of the `BytesWritable` instance safe, since the
   * bytes are copied out (`Utils.copyToByteString`) at consumption time.
   */
  override def reduce(key: BytesWritable, values: JIterable[BytesWritable], context: Context): Unit = {
    // Combining is only defined for single-table aggregations.
    assert(nTables == 1)

    // Decode the key once; it is shared by every pair below.
    val decodedKey = deserialize(keyType, Utils.copyToByteString(key))

    // Lazily pair the decoded key with each decoded value.
    val pairs = for (raw <- values.iterator)
      yield (decodedKey, deserialize(valType, Utils.copyToByteString(raw)))

    // Run the partial aggregation and emit each combined pair in
    // serialized form. (`aggr` is expected to be a Combiner here; the
    // cast fails fast otherwise.)
    aggr.asInstanceOf[Combiner].combine(pairs) { (outKey, outVal) =>
      val keyBytes = serialize(keyType, outKey).toByteArray
      val valBytes = serialize(valType, outVal).toByteArray
      context.write(new BytesWritable(keyBytes), new BytesWritable(valBytes))
    }
  }
}
