package db.prob.operators.join;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import db.prob.io.LineParser;
import db.prob.io.PTuple;

/**
 * Map side of a reduce-side (repartition) join over two tables.
 *
 * <p>Each input line is parsed into a {@link PTuple}. If the tuple belongs to
 * one of the two configured tables, it is emitted keyed by that table's
 * configured join attribute so that matching tuples from both tables meet at
 * the same reducer. Tuples from any other table are emitted under the
 * sentinel key {@link #PASS} and flow through unchanged.
 *
 * <p>Required job configuration keys: {@link #TABLE_1}, {@link #TABLE_2},
 * {@link #TABLE_1_ATTR}, {@link #TABLE_2_ATTR}.
 */
public class MapperJoin extends Mapper<LongWritable, Text, Text, Text> {

	/** Config key: name of the first (left) table of the join. */
	public static final String TABLE_1 = "table_1";
	/** Config key: name of the second (right) table of the join. */
	public static final String TABLE_2 = "table_2";
	/** Config key: name of the output table of the join. */
	public static final String RESULT_TABLE = "result_table";
	/** Config key: 0-based index of the join attribute within table 1 tuples. */
	public static final String TABLE_1_ATTR = "table_1_attr";
	/** Config key: 0-based index of the join attribute within table 2 tuples. */
	public static final String TABLE_2_ATTR = "table_2_attr";
	/** Sentinel key for tuples belonging to neither join table. */
	public static final String PASS = "pass";

	// Per-task configuration, cached once in setup() instead of being
	// re-read from the Configuration on every map() call.
	private String table1;
	private String table2;
	private int table1Attr;
	private int table2Attr;

	// Reused output key to avoid allocating a new Text per input record
	// (standard Hadoop object-reuse idiom; the framework serializes the
	// key before the next map() call).
	private final Text newKey = new Text();

	/**
	 * Caches the join configuration for the lifetime of this mapper task.
	 *
	 * <p>Integer.MAX_VALUE is kept as the missing-config sentinel for the
	 * attribute indices (matching the previous behavior): if an index is
	 * not configured, indexing the tuple's attribute array will fail fast
	 * with an ArrayIndexOutOfBoundsException.
	 */
	@Override
	protected void setup(Context context) throws IOException, InterruptedException {
		Configuration conf = context.getConfiguration();
		table1 = conf.get(TABLE_1);
		table2 = conf.get(TABLE_2);
		table1Attr = conf.getInt(TABLE_1_ATTR, Integer.MAX_VALUE);
		table2Attr = conf.getInt(TABLE_2_ATTR, Integer.MAX_VALUE);
	}

	/**
	 * Re-keys one tab-separated input tuple by its join attribute.
	 *
	 * @param key     byte offset of the line in the input split (unused)
	 * @param value   one tab-separated tuple line, first field assumed to be
	 *                the table name (see {@link LineParser#createPTuple})
	 * @param context emits (joinAttrValue, originalLine) for join-table
	 *                tuples, or (PASS, originalLine) otherwise
	 */
	@Override
	protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

		PTuple tuple = LineParser.createPTuple(value.toString(), "\\t");

		if (tuple.getTableName().equals(table1)) {
			newKey.set(tuple.getAttr()[table1Attr]);
		} else if (tuple.getTableName().equals(table2)) {
			newKey.set(tuple.getAttr()[table2Attr]);
		} else {
			// Not part of the join: route under a single sentinel key so the
			// reducer can pass these tuples through untouched.
			newKey.set(PASS);
		}

		context.write(newKey, value);
	}

}


