package com.makao.mtez.degrid;

import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.ToolRunner;
import org.apache.tez.client.TezClient;
import org.apache.tez.dag.api.DAG;
import org.apache.tez.dag.api.Edge;
import org.apache.tez.dag.api.EdgeProperty;
import org.apache.tez.dag.api.ProcessorDescriptor;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.Vertex;
import org.apache.tez.mapreduce.input.MRInput;
import org.apache.tez.mapreduce.output.MROutput;
import org.apache.tez.mapreduce.processor.SimpleMRProcessor;
import org.apache.tez.runtime.api.LogicalInput;
import org.apache.tez.runtime.api.LogicalOutput;
import org.apache.tez.runtime.api.ProcessorContext;
import org.apache.tez.runtime.api.Reader;
import org.apache.tez.runtime.library.api.KeyValueReader;
import org.apache.tez.runtime.library.api.KeyValueWriter;
import org.apache.tez.runtime.library.api.TezRuntimeConfiguration;
import org.apache.tez.runtime.library.conf.UnorderedKVEdgeConfig;
import org.apache.tez.runtime.library.conf.UnorderedPartitionedKVEdgeConfig;
import org.apache.tez.runtime.library.partitioner.HashPartitioner;
import org.apache.tez.runtime.library.processor.SimpleProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;
import com.makao.mtez.HashJoinExample;
import com.makao.mtez.TezExampleBase;
import com.makao.mtez.HashJoinExample.ForwardingProcessor;
import com.makao.mtez.HashJoinExample.HashJoinProcessor;

/**
 * Cogroup example DAG: two ForwardingProcessor source vertices (a streaming side
 * and a hash side) feed a HashJoinProcessor vertex that intersects their key sets.
 *
 * @author makao
 * @date 2016/12
 *
 * Arguments: <file1> <file2> <numPartitions> <outPath> [doBroadcast]
 * Local mode (without broadcast join):
 *   E://tez//Input//file01 E://tez//Input//file02 1 E://tez//Output//Cogroup
 * Cluster:
 *   hadoop jar Cogroup.jar /home/hadoop/makao/input/file01 /home/hadoop/makao/input/file02 1 /home/hadoop/makao/output/Cogroup
 */
public class Cogroup extends TezExampleBase {
	private static final Logger LOG = LoggerFactory.getLogger(Cogroup.class);

	/** Literal value of the optional 5th CLI argument that enables the broadcast edge. */
	private static final String broadcastOption = "doBroadcast";
	// Logical names used to wire the DAG's vertices, data source and data sink.
	private static final String streamingSide = "streamingSide";
	private static final String hashSide = "hashSide";
	private static final String inputFile = "inputFile";
	private static final String joiner = "joiner";
	private static final String joinOutput = "joinOutput";

	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// Uncomment to run in Tez local mode (no cluster required):
		/*conf.setBoolean(TezConfiguration.TEZ_LOCAL_MODE, true);
		conf.set("fs.default.name", "file:///");
		conf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH, true);*/

		int status = ToolRunner.run(conf, new Cogroup(), args);
		System.exit(status);
	}

	@Override
	protected void printUsage() {
		System.err.println("Usage: " + "hashjoin <file1> <file2> <numPartitions> <outPath> [" + broadcastOption
				+ "(default false)]");
	}

	@Override
	protected int validateArgs(String[] otherArgs) {
		// Exactly 4 mandatory arguments, plus the optional broadcast flag.
		if (!(otherArgs.length == 4 || otherArgs.length == 5)) {
			return 2;
		}
		return 0;
	}

	/**
	 * Validates paths and partition count, builds the cogroup DAG and runs it.
	 *
	 * @return 0 on success; 3 if the output directory already exists; 4 for a
	 *         non-positive partition count; otherwise the DAG's exit status
	 */
	@Override
	protected int runJob(String[] args, TezConfiguration tezConf, TezClient tezClient) throws Exception {
		// The optional 5th argument must be the literal "doBroadcast" to take effect.
		boolean doBroadcast = args.length == 5 && args[4].equals(broadcastOption);
		LOG.info("Running Cogroup" + (doBroadcast ? "-WithBroadcast" : ""));

		String streamInputDir = args[0];
		String hashInputDir = args[1];
		int numPartitions = Integer.parseInt(args[2]);
		String outputDir = args[3];

		Path streamInputPath = new Path(streamInputDir);
		Path hashInputPath = new Path(hashInputDir);
		Path outputPath = new Path(outputDir);

		// Refuse to clobber an existing output directory.
		FileSystem fs = FileSystem.get(tezConf);
		if (fs.exists(outputPath)) {
			System.err.println("Output directory: " + outputDir + " already exists");
			return 3;
		}
		if (numPartitions <= 0) {
			System.err.println("NumPartitions must be > 0");
			return 4;
		}

		DAG dag = createDag(tezConf, streamInputPath, hashInputPath, outputPath, numPartitions, doBroadcast);
		return runDag(dag, isCountersLog(), LOG);
	}

	/**
	 * Builds the three-vertex DAG: two ForwardingProcessor vertices (stream side
	 * and hash side) feeding a HashJoinProcessor vertex that writes the result.
	 *
	 * @param doBroadcast when true the hash side is delivered over a broadcast
	 *                    edge instead of a partitioned shuffle edge
	 */
	private DAG createDag(TezConfiguration tezConf, Path streamPath, Path hashPath,
			Path outPath, int numPartitions, boolean doBroadcast) throws IOException {
		DAG dag = DAG.create("Cogroup" + (doBroadcast ? "-WithBroadcast" : ""));

		// Both source vertices are configured with a real text input, but
		// ForwardingProcessor below ignores the file contents and emits mocked keys.
		Vertex hashFileVertex = Vertex.create(hashSide,
				ProcessorDescriptor.create(ForwardingProcessor.class.getName()))
				.addDataSource(inputFile,
						MRInput.createConfigBuilder(new Configuration(tezConf),
								TextInputFormat.class, hashPath.toUri().toString())
								.groupSplits(!isDisableSplitGrouping())
								.generateSplitsInAM(!isGenerateSplitInClient()).build());

		Vertex streamFileVertex = Vertex.create(streamingSide,
				ProcessorDescriptor.create(ForwardingProcessor.class.getName()))
				.addDataSource(inputFile,
						MRInput.createConfigBuilder(new Configuration(tezConf),
								TextInputFormat.class, streamPath.toUri().toString())
								.groupSplits(!isDisableSplitGrouping())
								.generateSplitsInAM(!isGenerateSplitInClient()).build());

		Vertex joinVertex = Vertex.create(joiner,
				ProcessorDescriptor.create(HashJoinProcessor.class.getName()), numPartitions)
				.addDataSink(joinOutput,
						MROutput.createConfigBuilder(new Configuration(tezConf),
								TextOutputFormat.class, outPath.toUri().toString()).build());

		// Keys are IntArray here (modified from the Text keys of the stock HashJoinExample).
		UnorderedPartitionedKVEdgeConfig streamConf = UnorderedPartitionedKVEdgeConfig
				.newBuilder(IntArray.class.getName(), IntWritable.class.getName(),
						HashPartitioner.class.getName())
				.setFromConfiguration(tezConf)
				.build();

		Edge e1 = Edge.create(streamFileVertex, joinVertex, streamConf.createDefaultEdgeProperty());

		// Hash side: either broadcast the whole data set to every join task, or
		// partition it exactly like the streaming side.
		EdgeProperty hashSideEdgeProperty;
		if (doBroadcast) {
			UnorderedKVEdgeConfig broadcastConf = UnorderedKVEdgeConfig
					.newBuilder(IntArray.class.getName(), IntWritable.class.getName())
					.setFromConfiguration(tezConf)
					.build();
			hashSideEdgeProperty = broadcastConf.createDefaultBroadcastEdgeProperty();
		} else {
			hashSideEdgeProperty = streamConf.createDefaultEdgeProperty();
		}
		Edge e2 = Edge.create(hashFileVertex, joinVertex, hashSideEdgeProperty);

		dag.addVertex(streamFileVertex).addVertex(hashFileVertex)
				.addVertex(joinVertex).addEdge(e1).addEdge(e2);
		return dag;
	}

	/**
	 * Source processor. The configured input is validated but otherwise ignored;
	 * instead the processor emits one mocked (IntArray key, IntWritable 1) pair
	 * per combination of the hard-coded dimension ranges below.
	 */
	public static class ForwardingProcessor extends SimpleProcessor {
		public ForwardingProcessor(ProcessorContext context) {
			super(context);
		}

		@Override
		public void run() throws Exception {
			Preconditions.checkState(getInputs().size() == 1);
			Preconditions.checkState(getOutputs().size() == 1);

			// The input is only sanity-checked; its records are never read.
			LogicalInput input = getInputs().values().iterator().next();
			Reader rawReader = input.getReader();
			Preconditions.checkState(rawReader instanceof KeyValueReader);
			LogicalOutput output = getOutputs().values().iterator().next();
			KeyValueWriter writer = (KeyValueWriter) output.getWriter();

			// Mocked key space: the cross product of
			// (beam, major_loop, frequency, time, facet, polarisation).
			for (int beam = 0; beam < 1; beam++)
				for (int major_loop = 0; major_loop < 1; major_loop++)
					for (int frequency = 0; frequency < 5; frequency++)
						for (int time = 0; time < 1; time++)
							for (int facet = 0; facet < 49; facet++)
								for (int polarisation = 0; polarisation < 4; polarisation++) {
									Integer[] temp = new Integer[6];
									temp[0] = beam;
									temp[1] = major_loop;
									temp[2] = frequency;
									temp[3] = time;
									temp[4] = facet;
									temp[5] = polarisation;
									IntArray intarray = new IntArray(6, temp);
									writer.write(intarray, new IntWritable(1));
									System.out.println("    key  is  " + temp[0] + "   " + temp[4]);
								}
		}
	}

	/**
	 * Join processor: loads every key from the hash side into an in-memory set,
	 * then streams the other side and writes each key found in the set.
	 * NOTE(review): the set lookup relies on IntArray implementing equals/hashCode
	 * consistently — confirm in the IntArray class.
	 */
	public static class HashJoinProcessor extends SimpleMRProcessor {

		public HashJoinProcessor(ProcessorContext context) {
			super(context);
		}

		@Override
		public void run() throws Exception {
			Preconditions.checkState(getInputs().size() == 2);
			Preconditions.checkState(getOutputs().size() == 1);
			LogicalInput streamInput = getInputs().get(streamingSide);
			LogicalInput hashInput = getInputs().get(hashSide);
			Reader rawStreamReader = streamInput.getReader();
			Reader rawHashReader = hashInput.getReader();
			Preconditions.checkState(rawStreamReader instanceof KeyValueReader);
			Preconditions.checkState(rawHashReader instanceof KeyValueReader);
			LogicalOutput lo = getOutputs().get(joinOutput);
			Preconditions.checkState(lo.getWriter() instanceof KeyValueWriter);
			KeyValueWriter writer = (KeyValueWriter) lo.getWriter();

			// Build the hash side; each key is defensively copied via IntArray's
			// copy constructor (presumably because the reader reuses its key
			// object between next() calls — confirm against the Tez reader).
			KeyValueReader hashKvReader = (KeyValueReader) rawHashReader;
			Set<IntArray> keySet = new HashSet<IntArray>();
			while (hashKvReader.next()) {
				Object o = hashKvReader.getCurrentKey();
				keySet.add(new IntArray((IntArray) o));
			}

			// Stream side: emit every key that also appeared on the hash side,
			// paired with a fixed 1 KiB zero-filled payload (mock output).
			KeyValueReader streamKvReader = (KeyValueReader) rawStreamReader;
			while (streamKvReader.next()) {
				IntArray key = (IntArray) streamKvReader.getCurrentKey();
				if (keySet.contains(key)) {
					byte[] buffer = new byte[1024];
					writer.write(key, new BytesWritable(buffer, 1024));
				}
			}
		}
	}
}
