package hashLinkGraph;

import java.io.IOException;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

import mapred.AbstractMapper;
import mapred.util.BytesBuilder;

public class HashLinkGraph_Mapper extends AbstractMapper<Text, Text, BytesWritable, BytesWritable> {

	/**
	 * Emits one record per source node: the key is the 16-byte hash of the
	 * node id ({@code ID.toBytes()}), the value is the concatenation of the
	 * hashes of its out-link targets (a zero-length payload when the node has
	 * no out-links).
	 *
	 * <p>Counters: {@code hash/lg} is incremented for nodes with at least one
	 * serialized out-link byte, {@code hash/non-lg} otherwise (including
	 * nodes whose value text is empty).
	 *
	 * @param key      source node id; surrounding whitespace is ignored
	 * @param value    tab-separated destination node ids; may be empty
	 * @param output   collector receiving (source hash, link-bytes) pairs
	 * @param reporter used only for the hash/lg and hash/non-lg counters
	 * @throws IOException if the collector fails
	 */
	@Override
	public void map(Text key, Text value,
			OutputCollector<BytesWritable, BytesWritable> output,
			Reporter reporter) throws IOException {
		ID source = new ID(key.toString().trim());
		String text = value.toString().trim();

		// Serialize the adjacency list into one contiguous byte payload;
		// an empty value line yields a zero-length payload.
		byte[] payload;
		if (text.isEmpty()) {
			payload = new byte[0];
		} else {
			BytesBuilder links = new BytesBuilder();
			for (String dest : text.split("\t")) {
				links.append(new ID(dest).toBytes());
			}
			payload = links.close();
		}

		output.collect(new BytesWritable(source.toBytes()), new BytesWritable(payload));

		// Distinguish nodes that actually carry out-links from those that don't.
		if (payload.length > 0) {
			reporter.incrCounter("hash", "lg", 1);
		} else {
			reporter.incrCounter("hash", "non-lg", 1);
		}
	}
}
