package mapreduce;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import mapreduce.ProcessType2.MapClass;
import mapreduce.ProcessType2.ReduceClass;
import util.hadoop.FlowWritable;
import util.hadoop.HDFSUtil;
import util.math.MyMath;

/*
 * flowNo.,flow[src-dst-pro-srcPort-dstPort],flowLength(s),flowThroughput(bytes/s),flowBytes(bytes),packetNum
	1,[192.168.224.80-1.1.1.1-UDP-58368-9001],10.080245000001014,42.558489401791014,429.0,3
	2,[192.168.224.80-1.111.161.175-UDP-10080-30247],0.007195000000137952,20013.8985402695,144.0,2
	3,[192.168.224.80-1.183.155.160-TCP-57808-4466],107.45859000000019,20.826627261720038,2238.0,10
*/


/*
 * Reads the per-flow records under dataoutput/type3/type3A as input and
 * writes one aggregate feature row per flow key under dataoutput/type1.
 */
/**
 * Aggregates per-flow statistics into per-key CSV feature rows.
 *
 * Input row (CSV, header on the first line of each file):
 *   flowNo,[src-dst-proto-srcPort-dstPort],length(s),throughput,bytes,packets,activityPeriods(a~b~c)
 * Output row:
 *   ipNo,key,flowCount,percentOnTime,activityPeriodCount,bytes,packets,avgThroughput
 */
public class ProcessType1 {

	// Global extremes over all activity-period values, updated by the mapper.
	// NOTE(review): mapper and reducer share these statics only when they run in
	// the same JVM (LocalJobRunner). In a distributed job each task has its own
	// JVM and the reducer would see 0.0/0.0 — confirm the deployment mode, or
	// propagate the extremes via counters/Configuration instead.
	// NOTE(review): MIN starts at 0.0, so it never rises to the true minimum of
	// all-positive period values and TIME effectively equals MAX. Presumably the
	// timeline is meant to start at 0 — confirm before changing.
	public static double MAX = 0.0;
	public static double MIN = 0.0;

	/**
	 * Mapper: one input line per flow. Emits
	 * (flowTuple, "length,throughput,bytes,packets,activityPeriods") and folds
	 * every activity-period value into the global MAX/MIN.
	 */
	public static class MapClass extends Mapper<LongWritable, Text, Text, Text> {

		// Reused output objects — avoids one allocation pair per record.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)
				throws IOException, InterruptedException {
			// Skip the CSV header: it always sits at byte offset 0 of its file.
			// (The previous setup()-based nextKeyValue() skip consumed the first
			// record of EVERY split, silently dropping data on multi-split input.)
			if (key.get() == 0) {
				return;
			}

			String[] flow = value.toString().split(",");

			// flow[1] is "[src-dst-proto-srcPort-dstPort]"; strip the brackets.
			// NOTE(review): the original split("~") here was a no-op (the tuple
			// contains no '~') and left a trailing ']' on the key, so the
			// grouping key is the full 5-tuple, not just the source IP that the
			// output header claims — confirm which grouping is intended.
			String groupKey = flow[1].replace("[", "").replace("]", "");

			// Fold this flow's activity-period values into the global extremes.
			for (String p : flow[6].split("~")) {
				double v = Double.parseDouble(p); // parse once, not per compare
				if (v > MAX) {
					MAX = v;
				}
				if (v < MIN) {
					MIN = v;
				}
			}

			// length,throughput,bytes,packets,activityPeriods
			String flowInfo = flow[2] + "," + flow[3] + "," + flow[4] + ","
					+ flow[5] + "," + flow[6];

			outKey.set(groupKey);
			outValue.set(flowInfo);
			context.write(outKey, outValue);
		}
	}

	/**
	 * Reducer: collapses all flows sharing one key into a single CSV row.
	 * Accumulates byte/packet/throughput totals and the set of distinct
	 * activity-period values.
	 */
	public static class ReduceClass extends Reducer<Text, Text, NullWritable, Text> {

		// Running row number, one per distinct key (single-reducer assumption).
		private int ipNo = 0;

		@Override
		protected void reduce(Text key, Iterable<Text> values, Reducer<Text, Text, NullWritable, Text>.Context context)
				throws IOException, InterruptedException {
			ipNo++;

			Set<String> periodSet = new HashSet<String>();
			int flowNum = 0;
			double bytes = 0;
			int packetNum = 0;
			double throughputSum = 0;

			// Value layout: length,throughput,bytes,packets,activityPeriods
			for (Text text : values) {
				flowNum++;
				String[] flowInfo = text.toString().split(",");
				for (String p : flowInfo[4].split("~")) {
					periodSet.add(p);
				}
				bytes += Double.parseDouble(flowInfo[2]);
				packetNum += Double.parseDouble(flowInfo[3]);
				throughputSum += Double.parseDouble(flowInfo[1]);
			}

			int periodNum = periodSet.size(); // was an O(n) counting loop

			// Observation window. Recomputed per call so it sees the mappers'
			// final MAX/MIN in local mode (the old construction-time field read
			// stale values). Guard the zero-width window, which previously
			// printed a literal "Infinity" into the CSV.
			double time = MAX - MIN;
			double percentOn = time > 0 ? (periodNum / time) * 100 : 0.0;

			String out = ipNo + "," + key.toString() + "," + flowNum + "," + percentOn + "," + periodNum
					+ "," + bytes + "," + packetNum + ","
					+ throughputSum / flowNum + "\r";
			context.write(NullWritable.get(), new Text(out));
		}

		@Override
		protected void setup(Reducer<Text, Text, NullWritable, Text>.Context context)
				throws IOException, InterruptedException {
			// Emit the CSV header once per reducer, before any data rows.
			String out = "Ip no.,Src Ip,Total number of flows,Percentage ON time(%), Number of activity periods"
					+ ",Number of bytes transferred,Number of packets transferred,Average throughput\r";
			context.write(NullWritable.get(), new Text(out));
		}
	}

	/**
	 * Configures and runs the job. args[0] is the HDFS master hostname.
	 * Reads /dataoutput/type3/type3A/ and writes /dataoutput/type1/.
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		String master = args[0];
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://" + master + ":9000");

		HDFSUtil hdfsUtil = new HDFSUtil(conf);
		Job job = Job.getInstance(conf, "process");

		job.setJarByClass(ProcessType1.class);
		job.setMapperClass(MapClass.class);
		job.setReducerClass(ReduceClass.class);

		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);

		// Map and final output types differ (map emits Text/Text, the reducer
		// writes NullWritable/Text), so both pairs must be declared explicitly.
		// The old code declared Text as the job output key even though the
		// reducer emits NullWritable, and left the map output classes implicit.
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(Text.class);

		Path in = new Path("hdfs://" + master + ":9000/dataoutput/type3/type3A/");
		Path out = new Path("hdfs://" + master + ":9000/dataoutput/type1/");
		hdfsUtil.delete(out); // the output directory must not pre-exist

		FileInputFormat.addInputPath(job, in);
		FileOutputFormat.setOutputPath(job, out);

		boolean ok = job.waitForCompletion(true);
		System.out.println(ok ? 0 : 1);

		// Drop the _SUCCESS marker so only data files remain in the output dir.
		hdfsUtil.delete(new Path("hdfs://" + master + ":9000/dataoutput/type1/_SUCCESS"));

		// Reflect job failure in the process exit status (previously it was
		// only printed and the JVM always exited 0).
		if (!ok) {
			System.exit(1);
		}
	}
}
