package apweb;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Random;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;

public class MapJoinOverMapReduce extends Configured implements Tool {
	private static final Logger sLogger = Logger.getLogger(MapJoinOverMapReduce.class);

	// mapper: emits (token, 1) for every word occurrence
	private static class MyMapper extends Mapper<LongWritable, Text, Text, Text> {

		// reuse objects to save overhead of object creation
		Random randomGenerator = new Random();
		private static Hashtable<String, ArrayList<String>> hashTable = new Hashtable<String, ArrayList<String>>();
		private FileSystem fs = null;
		private String localTablePath = "smallTableFile.txt";
		String[] tags=null;
		String leftTag = null;
		String rightTag = null;
		int leftIndex=0;
		int rightIndex=0;
		public void before_map(Context context)
		{
			if( hashTable!=null && hashTable.size()>0)
			{
				System.out.println("There are records in hashtable: " + hashTable.size());
				return;
			}
			//begin to read the small table
			Configuration conf = context.getConfiguration();
			if(fs ==null)
			{
				try {
					fs = FileSystem.get(conf);
				} catch (IOException e) {
					// TODO Auto-generated catch block
					e.printStackTrace();
				}
			}
			
			String smallTablePath = conf.get("SmallTable");
			int rightIndex = conf.getInt("rightIndex", 0);
			tags = conf.get("tags").split(",");
			leftTag = tags[0].trim();
			rightTag = tags[1].trim();
			
			try {
				/*
				 * read all files from hadoop
				 */
				FileStatus[] fst = fs.listStatus(new Path(smallTablePath));
				System.out.println("smallTablePath:"+smallTablePath);
				System.out.println("smallTablePath files:" + fst.length);
				hashTable.clear();
				
				for(int i=1; i < fst.length; i++)
				{
					String appendixName = String.valueOf(Math.random());
					String localFileName = localTablePath+appendixName;
					System.out.println("file:"+i+"\t"+fst[i].getPath());
					fs.copyToLocalFile(fst[i].getPath(), new Path(localFileName));
					//begin to read the file into hashtable
					FileReader freader = null;
					BufferedReader bufReader = null;
					freader = new FileReader(localFileName);
					bufReader = new BufferedReader(freader);
					String line= bufReader.readLine();
					while(line!=null)
					{
						//
						System.out.println("readInput: " + line);
						String[] tmp = line.trim().split("\t");
						if(!tmp[tmp.length-1].equals(rightTag))
							continue;
						if(hashTable.containsKey(tmp[rightIndex]))
						{
							hashTable.get(tmp[rightIndex]).add(line.trim());
						}
						else
						{
							ArrayList<String> tmplist = new ArrayList<String>();
							tmplist.add(line.trim());
							hashTable.put(tmp[rightIndex], tmplist);
						}
						line = bufReader.readLine();
					}
					bufReader.close();
					freader.close();
					System.out.println(localFileName);
					File tmpfile = new File(localFileName);
					tmpfile.delete();
				}

			} catch (IOException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
			
		}
		
		
		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException,
				InterruptedException {
			//
			
			
			Configuration conf = context.getConfiguration();
			before_map(context);
			if(hashTable==null || hashTable.size()<=0)
			{
//				System.out.println("hashTable is unavailible.");
				return;
			}
			else
			{
				System.out.println("hashtable size:" + hashTable.size());
			}
			tags = conf.get("tags").split(",");
			leftTag = tags[0].trim();
			rightTag = tags[1].trim();
			leftIndex = conf.getInt("leftIndex", 0);
			rightIndex = conf.getInt("rightIndex", 0);
			System.out.println("Index and Tags: " + leftIndex+"-"+rightIndex+"\t"+leftTag+"-"+rightTag);
			String[] valuelist = value.toString().split("\t");//column1,column2,column3,tag
			
			//begin to join
			String tmpKey = "BadKey";
			if(valuelist[valuelist.length-1].trim().equals(leftTag))
			{
				tmpKey = valuelist[leftIndex];
				if(!hashTable.containsKey(tmpKey))
				{
					System.out.println("NotMatch for key: " + tmpKey);
					return;
				}
				ArrayList<String> records = hashTable.get(tmpKey);
				for(String leftrecord:records)
				{
					context.write(new Text(tmpKey), new Text(ToolsCollections.combine(value.toString().trim(), leftrecord.trim())));
				}
			}
			else
			{
				return;
			}
		}
	}

	/**
	 * Prints the expected positional arguments followed by the generic
	 * Hadoop command options, and returns the error exit code.
	 */
	private static int printUsage() {
		System.out.println("usage: [left-path] [right-path] [leftIndex,rightIndex] [left-tag,right-tag] [out-path]");
		ToolRunner.printGenericCommandUsage(System.out);
		return -1;
	}

	/**
	 * Runs this tool: configures and submits a map-only join job.
	 *
	 * @param args [left-path] [right-path] [leftIndex,rightIndex]
	 *             [left-tag,right-tag] [out-path]
	 * @return 0 on success, -1 on invalid arguments
	 * @throws Exception if job setup or execution fails
	 */
	public int run(String[] args) throws Exception {
		if (args.length != 5) {
			System.out.println("MapJoinOverMapReduce, args size:" + args.length);
			// BUG FIX: args.toString() printed the array's identity hash
			// (e.g. "[Ljava.lang.String;@1b6d..."), not its contents.
			System.out.println(Arrays.toString(args));
			printUsage();
			return -1;
		}

		String leftPath = args[0];
		String rightPath = args[1];

		int leftIndex = Integer.parseInt(args[2].split(",")[0]);
		int rightIndex = Integer.parseInt(args[2].split(",")[1]);

		String leftTag = args[3].split(",")[0];
		String rightTag = args[3].split(",")[1];

		String outputPath = args[4];

		sLogger.info("Tool: MapJoinOverMapReduce");
		sLogger.info(" - input path: " + leftPath + "\t" + rightPath);
		sLogger.info(" - output path: " + outputPath);
		sLogger.info(" - tags: " + leftTag + "\t" + rightTag);
		sLogger.info(" - join: " + leftIndex + "\t" + rightIndex);

		Configuration conf = new Configuration();
		conf.set("tags", args[3]);
		conf.setInt("leftIndex", leftIndex);
		conf.setInt("rightIndex", rightIndex);
		conf.set("SmallTable", rightPath); // the small (right) table loaded in-memory by each mapper

		Job job = new Job(conf, "MapJoinOverMapReduce");
		job.setJarByClass(MapJoinOverMapReduce.class);

		// Only the large (left) table flows through MapReduce; the small table
		// is read directly from the file system by the mappers.
		FileInputFormat.setInputPaths(job, leftPath);
		FileOutputFormat.setOutputPath(job, new Path(outputPath));

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		job.setMapperClass(MyMapper.class);
		// Map-only job: the join happens entirely on the map side.
		job.setNumReduceTasks(0);

		// Delete the output directory if it exists already
		Path outputDir = new Path(outputPath);
		FileSystem.get(conf).delete(outputDir, true);

		long startTime = System.currentTimeMillis();
		job.waitForCompletion(true);
		// BUG FIX: the original logged "Job TestMapReduce Finished" — a
		// copy-paste leftover naming the wrong job.
		sLogger.info("Job MapJoinOverMapReduce Finished in "
				+ (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

		return 0;
	}

	/**
	 * Dispatches command-line arguments to the tool via the
	 * <code>ToolRunner</code> and exits with the tool's return code.
	 */
	public static void main(String[] args) throws Exception {
		final int exitCode = ToolRunner.run(new Configuration(), new MapJoinOverMapReduce(), args);
		System.exit(exitCode);
	}
}
