package mapreduce;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import util.hadoop.HDFSUtil;
import util.init.Location;
import util.ip.IpReg;
import util.math.MathHelp;
import util.math.MyMath;
import util.hadoop.FlowWritable;



/* 输入参数为location，表明当前处理的是哪个地方的数据包
 * datainput/location目录下的文件为输入，输出文件在dataoutput/type3/type3A目录下
 */

public class ProcessType3A {
	
	/*
	 * 标识使用了哪个地址的正则表达式，这个是过滤源地址的
	 * locationRegex的初始化在main方法中
	 */
	public static String locationRegex;
	
	public static class MapClass extends Mapper<LongWritable, Text, FlowWritable, Text> {

		// Source-address filter; its pattern comes from locationRegex (set in main()).
		IpReg ipReg = new IpReg();

		@Override
		protected void setup(Mapper<LongWritable, Text, FlowWritable, Text>.Context context)
				throws IOException, InterruptedException {
			// FIX: the original called context.nextKeyValue() here to skip the CSV
			// header, but that consumes the FIRST record of EVERY input split, so
			// multi-split inputs silently lost one data row per split. The header
			// is now detected in map() by its byte offset instead.
			ipReg.setSrcPattern(locationRegex);
		}

		/**
		 * Parses one CSV packet record and, for packets whose source matches the
		 * location filter and whose destination is an ISP (external) address,
		 * emits (flow 5-tuple) -> "time,length".
		 *
		 * Packet record format:
		 * "No.","Time","Source","Destination","Protocol","Source Port","Destination Port","Length"
		 * "1","0.000000","169.254.34.247","224.0.0.252","LLMNR","54663","5355","75"
		 */
		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, FlowWritable, Text>.Context context)
				throws IOException, InterruptedException {

			String raw = value.toString();

			// Skip the header row; it can only appear at byte offset 0 of the file.
			if (key.get() == 0 && raw.startsWith("\"No.\"")) {
				return;
			}

			String[] line = raw.split(",");
			// FIX: guard against blank/truncated lines — a valid record has 8 fields;
			// the original threw ArrayIndexOutOfBoundsException on malformed input.
			if (line.length < 8) {
				return;
			}

			// Fields are quoted in the CSV; strip the quotes. replace() does a
			// literal substitution — no regex compilation per field as with replaceAll().
			String source = line[2].replace("\"", "");
			String destination = line[3].replace("\"", "");
			String protocol = line[4].replace("\"", "");

			// Keep only traffic from a user source inside the monitored location
			// headed to an external (ISP) destination.
			if (ipReg.isUserSrc(source) && ipReg.isISP(destination)) {
				String time = line[1].replace("\"", "");
				String sourcePort = line[5].replace("\"", "");
				String destinationPort = line[6].replace("\"", "");
				String length = line[7].replace("\"", "");

				FlowWritable flow = new FlowWritable(new Text(source), new Text(destination),
						new Text(protocol), new Text(sourcePort), new Text(destinationPort));
				// Value carries "<time>,<length>" so the reducer can compute
				// per-flow duration, throughput and byte totals.
				context.write(flow, new Text(time + "," + length));
			}
		}
	}
	/*
	 * 对flow处理，没有注释
	 */
	public static class ReduceClass extends Reducer<FlowWritable, Text, NullWritable, Text> {

		// Running flow counter: one Reducer instance handles many keys, so this
		// numbers flows sequentially across reduce() calls.
		int flowNo = 0;
		// Helper used to compute the per-flow activity period; stateless, so a
		// single shared instance is fine.
		MathHelp mh = new MathHelp();

		@Override
		protected void setup(Reducer<FlowWritable, Text, NullWritable, Text>.Context context)
				throws IOException, InterruptedException {
			// Emit the CSV header row once, before any flow records.
			String out = "Flow no.,Flow[Src~Dst~Pro~Src Port~Dst Port],flow Length(s),"
					+ "Flow Throughput(kb/s),Bytes transferred per flow(bytes),"
					+ "Packets transferred per flow(个),Activity period\r";
			context.write(NullWritable.get(), new Text(out));
		}

		/**
		 * Aggregates all "time,length" samples of one flow into a single CSV
		 * line: flow number, 5-tuple, duration, throughput (kb/s), total bytes,
		 * packet count and activity period. Zero-length (single-packet) flows
		 * are skipped to avoid a division by zero in the throughput.
		 */
		@Override
		protected void reduce(FlowWritable key, Iterable<Text> values,
				Reducer<FlowWritable, Text, NullWritable, Text>.Context context)
						throws IOException, InterruptedException {
			// Each value is "<time>,<length>" as emitted by the mapper.
			List<Double> timeList = new ArrayList<Double>();
			List<Double> bytesList = new ArrayList<Double>();
			for (Text text : values) {
				String[] timeAndBytes = text.toString().split(",");
				timeList.add(Double.parseDouble(timeAndBytes[0]));
				bytesList.add(Double.parseDouble(timeAndBytes[1]));
			}

			// FIX: these were mutable instance fields reassigned on every call;
			// they are purely per-key state, so plain locals are correct.
			MyMath timeMath = new MyMath(timeList);
			MyMath bytesMath = new MyMath(bytesList);

			// Flow duration = last packet timestamp - first packet timestamp.
			double flowLength = timeMath.getMax() - timeMath.getMin();
			if (flowLength != 0) {
				flowNo++;
				double flowBytes = bytesMath.getSum();
				double flowThroughput = (flowBytes / flowLength) / 1024;
				int packetNum = timeMath.getSize();
				String out = flowNo + "," + key.toString() + "," + flowLength + ","
						+ flowThroughput + "," + flowBytes + "," + packetNum + ","
						+ mh.timeHash(timeList) + "\r";
				context.write(NullWritable.get(), new Text(out));
			}
		}
	}
	
	
	
	

	
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {

		// Ask the operator which location's traffic to process; the answer
		// selects both the mapper's source-address regex and the input directory.
		Location init = new Location();
		System.out.println("请输入需要过滤的地址：");
		Scanner scanner = new Scanner(System.in);
		String location = scanner.nextLine();
		locationRegex = init.getLocationRegex(location);
		scanner.close(); // FIX: the Scanner was never closed (resource leak)

		String master = args[0];
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://" + master + ":9000");

		HDFSUtil hdfsUtil = new HDFSUtil(conf);
		Job job = Job.getInstance(conf, "process");

		job.setJarByClass(ProcessType3A.class);

		job.setMapperClass(MapClass.class);
		job.setReducerClass(ReduceClass.class);

		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);

		// FIX: map and reduce emit different key types, so both stages must be
		// declared explicitly. The original set only the job output classes to
		// the MAP output types (FlowWritable/Text), mis-describing the reducer's
		// actual NullWritable/Text output.
		job.setMapOutputKeyClass(FlowWritable.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(Text.class);

		Path in = new Path("hdfs://" + master + ":9000/datainput/" + location + "/");
		Path out = new Path("hdfs://" + master + ":9000/dataoutput/type3/type3A/");
		// Clear any previous run's output so the job can start.
		hdfsUtil.delete(out);
		FileInputFormat.addInputPath(job, in);
		FileOutputFormat.setOutputPath(job, out);

		// Capture the job result instead of discarding it inline.
		boolean success = job.waitForCompletion(true);
		System.out.println(success ? 0 : 1);

		// Remove the _SUCCESS marker so downstream consumers only see data files.
		Path successMarker = new Path("hdfs://" + master + ":9000/dataoutput/type3/type3A/_SUCCESS");
		hdfsUtil.delete(successMarker);
	}

}
