package com.leo.hadoop.reduce;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.conf.Configuration;

import com.leo.util.Schema;
import com.leo.hadoop.*;

/**
 * Base reducer that loads a {@link Schema} from the job configuration before
 * processing and publishes the subclass's output schema back into the
 * configuration after the first key group is reduced.
 *
 * Subclasses implement {@link #init(Configuration)}, {@link #process} and
 * {@link #getOutputSchema()}.
 */
public abstract class MyReducer extends Reducer<Text, Text, Text, Text> {
    // TODO: log service
    protected Schema schema;   // loaded from the "schema" configuration entry in setup()
    protected String input;    // raw value of the "input" configuration entry
    private boolean hasSetOutputSchema = false;

    /**
     * No-arg constructor required for Hadoop's reflective instantiation.
     * Intentionally empty: {@link #initSchema()} and {@link #init(Configuration)}
     * are overridable/abstract and must not run from the constructor — they
     * would execute before subclass fields are initialized. They are invoked
     * from {@link #setup(Context)} instead, which the framework guarantees to
     * call once before the first {@link #reduce} call.
     */
    protected MyReducer() {
    }

    /** Per-task initialization hook invoked by the Hadoop framework. */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
	super.setup(context);
	initSchema();
	init(getConf());
    }

    public void setSchema(Schema schema) {
	this.schema = schema;
    }

    public Schema getSchema() {
	return this.schema;
    }

    /** Job configuration, delegated to the application-wide holder. */
    public Configuration getConf() {
	return Main.getConf();
    }

    /**
     * Loads the input schema and input path from the job configuration.
     *
     * @throws ReducerInitException if the schema cannot be loaded — the
     *         reducer cannot go further without a schema
     */
    public void initSchema() {
	this.schema = new Schema();
	String schemaConfig = getConf().get("schema");
	if (!this.schema.load(schemaConfig))
	    throw new ReducerInitException("cannot load schema from config value: " + schemaConfig);
	input = getConf().get("input");
    }

    @Override
    public void reduce(Text key, Iterable<Text> value, Context context) throws IOException, InterruptedException {
	process(key, value, context);
	// Publish the output schema exactly once, after the first group has
	// been processed (subclasses may derive it during process()).
	if (!hasSetOutputSchema) {
	    hasSetOutputSchema = true;
	    getConf().setStrings("schema", getOutputSchema().genConfig());
	}
    }

    /** Schema describing this reducer's output records. */
    public abstract Schema getOutputSchema();

    /** Subclass initialization; called once from setup() with the job configuration. */
    protected abstract void init(Configuration conf);

    /** Handles one key group; called once per key from reduce(). */
    protected abstract void process(Text key, Iterable<Text> value, Context context) throws IOException, InterruptedException;

}


/**
 * Unchecked exception signaling that a reducer could not complete its
 * initialization (e.g. the schema could not be loaded from configuration).
 */
class ReducerInitException extends RuntimeException {
    private static final long serialVersionUID = 1L;

    public ReducerInitException() {
    }

    public ReducerInitException(String str) {
	super(str);
    }

    /** Preserves the underlying cause instead of dropping it. */
    public ReducerInitException(String str, Throwable cause) {
	super(str, cause);
    }
}