/**
 * MapReduce driver that exports Weibo rows from an HBase table as YAML
 * documents on HDFS.
 */
package name.yzhu.wbdata.util;

import java.io.IOException;

import name.yzhu.cloud.CloudException;
import name.yzhu.cloud.IDriver;
import name.yzhu.cloud.component.YamlDocument;
import name.yzhu.cloud.component.YamlOutputFormat;
import name.yzhu.cloud.hbase.HBaseResult;
import name.yzhu.common.lang.DoubleKeyMap;
import name.yzhu.wbdata.io.WeiboYamlRow;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.log4j.Logger;

/**
 * Driver that scans an HBase table and writes each row out to HDFS as a
 * YAML document via a MapReduce job.
 *
 * @author yhzhu
 */
public class MRWBToYamlFromHBase implements IDriver {

	/** Shared logger; also used from map tasks to record per-row conversion failures. */
	private static final Logger LOG = Logger.getLogger(MRWBToYamlFromHBase.class);

	/* (non-Javadoc)
	 * @see name.yzhu.cloud.IDriver#getName()
	 */
	@Override
	public String getName() {
		return "MRWBToYamlFromHBase";
	}

	/* (non-Javadoc)
	 * @see name.yzhu.cloud.IDriver#getDescription()
	 */
	@Override
	public String getDescription() {
		return "run mapreduce job to export yaml from hbase to hdfs";
	}

	/**
	 * Mapper: converts each scanned HBase {@link Result} into a
	 * {@link YamlDocument} keyed by the row key.
	 */
	static class ExportMapper
	  extends TableMapper<Text, YamlDocument> {

		@Override
		protected void map(ImmutableBytesWritable key, Result value,
				Context context) throws IOException, InterruptedException {
			try {
				// Flatten the HBase result into a (family, qualifier) -> cell-bytes map.
				DoubleKeyMap<String, String, byte[]> dMap = HBaseResult.resultToDoubleKeyMap(value);
				WeiboYamlRow row = new WeiboYamlRow(key.get());
				row.write(dMap);
				YamlDocument yaml = new YamlDocument(row);
				context.write(new Text(key.get()), yaml);
			} catch (CloudException e) {
				// Log the failing row key, then fail the task: swallowing the
				// error would silently drop rows from the export.
				LOG.error(key.toString(), e);
				throw new IOException(e);
			}
		}

	}

	/**
	 * Identity reducer: forwards every YAML document to the output format
	 * unchanged, so documents sharing a row key are emitted together.
	 */
	static class ExportReducer extends TableReducer<Text, YamlDocument, Text> {
		@Override
		public void reduce(Text key, Iterable<YamlDocument> values, Context context)
				throws IOException, InterruptedException {
			for (YamlDocument yaml : values) {
				context.write(key, yaml);
			}
		}

	}

	/**
	 * Entry point. Expects at least two arguments after the generic Hadoop
	 * options: the source HBase table name and the HDFS output directory.
	 * Exits 0 on success, non-zero on wrong usage or job failure.
	 */
	public static void main(String[] args) {
		Configuration conf = HBaseConfiguration.create();
		try {
			String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
			if (otherArgs.length < 2) {
				System.out.println("Wrong number of arguments: " + otherArgs.length);
				System.exit(-1);
			}
			Job job = createSubmittableJob(conf, otherArgs);
			System.exit(job.waitForCompletion(true) ? 0 : 1);
		} catch (Exception e) {
			// BUGFIX: previously only printStackTrace() and fell through, so an
			// exception made the JVM exit with status 0 and scripts saw success.
			LOG.error("export job failed", e);
			System.exit(1);
		}
	}

	/**
	 * Builds the export job: scan {@code args[0]} with {@link ExportMapper},
	 * funnel through {@link ExportReducer}, and write YAML files to
	 * {@code args[1]} on HDFS.
	 *
	 * @param conf HBase-aware cluster configuration
	 * @param args {@code args[0]} = source table name, {@code args[1]} = output directory
	 * @return the configured, unsubmitted job
	 * @throws IOException on filesystem or job-setup failure
	 */
	private static Job createSubmittableJob(Configuration conf,
			String[] args) throws IOException {
		 String tableName = args[0];
		 Path outputDir = new Path(args[1]);
		 FileSystem fs = FileSystem.get(conf);
		 // Recreate the output directory so re-runs do not fail on leftovers.
		 if (fs.exists(outputDir))
			 fs.delete(outputDir, true);

	 	 Job job = new Job(conf, "export " + tableName + " to hdfs");
		 job.setJarByClass(MRWBToYamlFromHBase.class);
		 job.setOutputFormatClass(YamlOutputFormat.class);
		 YamlOutputFormat.setOutput(outputDir);
		 job.setOutputKeyClass(Text.class);
		 job.setOutputValueClass(YamlDocument.class);
		 // Speculative duplicates would write duplicate files into the output dir.
		 job.setSpeculativeExecution(false);
		 TableMapReduceUtil.initTableMapperJob(tableName, new Scan(),
				ExportMapper.class, Text.class, YamlDocument.class, job);
		 // BUGFIX: initTableReducerJob() installs TableOutputFormat, which
		 // clobbers the YamlOutputFormat configured above and writes rows back
		 // into the source table instead of exporting YAML to HDFS. Wire the
		 // reducer in directly so the HDFS output format stays in effect.
		 job.setReducerClass(ExportReducer.class);

		return job;
	}
}
