package mapreduce;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import util.hadoop.HDFSUtil;
import util.math.MyMath;

/*
 * flowNo.,flow[src-dst-pro-srcPort-dstPort],flowLength(s),flowThroughput(bytes/s),flowBytes(bytes),packetNum
	1,[192.168.224.80-1.1.1.1-UDP-58368-9001],10.080245000001014,42.558489401791014,429.0,3
	2,[192.168.224.80-1.111.161.175-UDP-10080-30247],0.007195000000137952,20013.8985402695,144.0,2
	3,[192.168.224.80-1.183.155.160-TCP-57808-4466],107.45859000000019,20.826627261720038,2238.0,10
*/


/*
 * Reads the files under dataoutput/type3/type3A as input and writes
 * per-flow feature statistics (per source IP) to dataoutput/type3/type3.
 */
public class ProcessType3B {
	
	public static class MapClass extends Mapper<LongWritable, Text, Text, Text> {

		// Reused output writables — avoids allocating a pair per input record.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		/**
		 * Parses one CSV record of per-flow statistics
		 * ("flowNo,[tuple],length,throughput,bytes,packets") and emits
		 * (sourceIp, "length,throughput,bytes,packets").
		 *
		 * NOTE(review): the tuple field is split on '~', while the sample in the
		 * file-header comment uses '-' separators — confirm against the actual
		 * type3A output format.
		 */
		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)
				throws IOException, InterruptedException {

			// Skip the header row: it sits at byte offset 0 of its file, so this
			// test discards only headers. (The original skipped it by calling
			// context.nextKeyValue() in setup(), which runs once per SPLIT and
			// therefore silently drops the first data record of every split
			// after the first — data loss for multi-split files.)
			if (key.get() == 0L) {
				return;
			}

			String line = value.toString();
			if (line.isEmpty()) {
				return; // tolerate blank lines instead of throwing on the field access
			}

			String[] allFlowInfo = line.split(",");
			// allFlowInfo[1] is the flow tuple; field 0 of the '~' split is
			// "[srcIp" — strip the leading bracket to get the bare source IP.
			String[] tuple = allFlowInfo[1].split("~");
			String ip = tuple[0].replace("[", "");

			// length(s), throughput(bytes/s), bytes, packet count
			String flowInfo = allFlowInfo[2] + "," + allFlowInfo[3] + ","
					+ allFlowInfo[4] + "," + allFlowInfo[5];

			outKey.set(ip);
			outValue.set(flowInfo);
			context.write(outKey, outValue);
		}

	}
	/*
	 * Aggregates the per-flow feature values of each source IP and writes
	 * distribution statistics (min, max, average, median, standard deviation,
	 * skewness, kurtosis) for flow length, throughput, bytes and packet count
	 * to the "type3" named output, one CSV file per feature, plus a "User"
	 * file listing every source IP.
	 */
	public static class ReduceClass extends Reducer<Text, Text, NullWritable, Text> {

		// All four statistics files share the same CSV header.
		private static final String HEADER =
				"Ip no.,Src Ip,Min,Max,Average,Median,Standard,Skewness,Kurtosis\r";

		private MultipleOutputs<NullWritable, Text> mos;

		// 1-based sequence number of the IP currently being reduced.
		// NOTE(review): this counts per reducer task, so numbers restart in
		// each reducer when more than one is configured — confirm intended.
		private int ipNo = 0;

		@Override
		protected void setup(Reducer<Text, Text, NullWritable, Text>.Context context)
				throws IOException, InterruptedException {
			mos = new MultipleOutputs<>(context);

			// Emit one header row at the top of each statistics file.
			mos.write("type3", NullWritable.get(), new Text(HEADER), "FlowLength");
			mos.write("type3", NullWritable.get(), new Text(HEADER), "FlowThroughput");
			mos.write("type3", NullWritable.get(), new Text(HEADER), "FlowBytes");
			mos.write("type3", NullWritable.get(), new Text(HEADER), "FlowPackets");
		}

		@Override
		protected void reduce(Text key, Iterable<Text> values,
				Reducer<Text, Text, NullWritable, Text>.Context context) throws IOException, InterruptedException {
			ipNo++;

			// Local lists: unlike the previous instance-level fields, these need
			// no manual clear() between keys and cannot leak state across calls.
			List<Double> lengthList = new ArrayList<Double>();
			List<Double> throughputList = new ArrayList<Double>();
			List<Double> bytesList = new ArrayList<Double>();
			List<Double> packetList = new ArrayList<Double>();

			// Each value is "length,throughput,bytes,packets" from the mapper.
			for (Text text : values) {
				String[] flowInfo = text.toString().split(",");
				lengthList.add(Double.valueOf(flowInfo[0]));
				throughputList.add(Double.valueOf(flowInfo[1]));
				bytesList.add(Double.valueOf(flowInfo[2]));
				packetList.add(Double.valueOf(flowInfo[3]));
			}

			// Same file order as before: length, throughput, bytes, packets.
			String ip = key.toString();
			writeStats(ip, lengthList, "FlowLength");
			writeStats(ip, throughputList, "FlowThroughput");
			writeStats(ip, bytesList, "FlowBytes");
			writeStats(ip, packetList, "FlowPackets");

			// Record the IP itself in the "User" listing.
			mos.write("type3", NullWritable.get(), new Text(ip + "\r"), "User");
		}

		/** Writes one CSV statistics row for the given samples to the named file. */
		private void writeStats(String ip, List<Double> samples, String fileName)
				throws IOException, InterruptedException {
			MyMath mm = new MyMath(samples);
			mos.write("type3", NullWritable.get(), new Text(ipNo + "," + ip
					+ "," + mm.getMin() + "," + mm.getMax()
					+ "," + mm.getAve() + "," + mm.getMedian() + "," + mm.getStandard()
					+ "," + mm.getSkew() + "," + mm.getKurt() + "\r"), fileName);
		}

		@Override
		protected void cleanup(Reducer<Text, Text, NullWritable, Text>.Context context)
				throws IOException, InterruptedException {
			// Flushes and closes every named output stream.
			mos.close();
		}

	}
	
	
	
	

	
	/**
	 * Configures and runs the job: input from /dataoutput/type3/type3A on the
	 * given HDFS master, named "type3" outputs under /dataoutput/type3/type3.
	 *
	 * @param args args[0] = hostname of the HDFS/YARN master (NameNode on port 9000)
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		if (args.length < 1) {
			// Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
			System.err.println("Usage: ProcessType3B <master-host>");
			System.exit(2);
		}
		String master = args[0];
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://" + master + ":9000");

		HDFSUtil hdfsUtil = new HDFSUtil(conf);
		Job job = Job.getInstance(conf, "process");

		job.setJarByClass(ProcessType3B.class);

		job.setMapperClass(MapClass.class);
		job.setReducerClass(ReduceClass.class);

		job.setInputFormatClass(TextInputFormat.class);

		// All real output goes through this named output; the reducer never
		// writes to the default context, so part-r-* files stay empty.
		MultipleOutputs.addNamedOutput(job, "type3", TextOutputFormat.class, NullWritable.class, Text.class);

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		Path in  = new Path("hdfs://" + master + ":9000/dataoutput/type3/type3A/");
		Path out = new Path("hdfs://" + master + ":9000/dataoutput/type3/type3/");
		// The output directory must not exist when the job starts.
		hdfsUtil.delete(out);

		FileInputFormat.addInputPath(job, in);
		FileOutputFormat.setOutputPath(job, out);

		boolean success = job.waitForCompletion(true);
		System.out.println(success ? 0 : 1);

		// Clean up the marker file and the empty default part file (see note
		// above), leaving only the MultipleOutputs files in the directory.
		Path out1 = new Path("hdfs://" + master + ":9000/dataoutput/type3/type3/_SUCCESS");
		Path out2 = new Path("hdfs://" + master + ":9000/dataoutput/type3/type3/part-r-00000");
		hdfsUtil.delete(out1);
		hdfsUtil.delete(out2);

		// Propagate job status as the process exit code so callers/scripts
		// can detect failure (previously it was only printed).
		System.exit(success ? 0 : 1);
	}

}
