package mapreduce;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import util.hadoop.HDFSUtil;
import util.ip.IpReg;
import util.ip.ProCalc;
import util.ip.ProFilter;
import util.hadoop.FlowWritable;


/*
 * flowNo., flow[src-dst-pro-srcPort-dstPort], packetNum, flowLength, min, max, average, median, standard, skew, kurtosis
	1,[192.168.224.80-1.1.1.1-UDP-58368-9001],3,429.0,143.0,143.0,143.0,143.0,0.0,0.0,0.0
	2,[192.168.224.80-1.111.161.175-UDP-10080-30247],2,144.0,72.0,72.0,72.0,72.0,0.0,0.0,0.0
	3,[192.168.224.80-1.183.155.160-TCP-57808-4466],10,2238.0,54.0,1494.0,223.8,54.0,429.644,3.523,5.509
	4,[192.168.224.80-1.188.186.116-TCP-62060-8090],3,194.0,62.0,66.0,64.667,66.0,1.886,-3.182,-0.75
	5,[192.168.224.80-1.189.125.89-UDP-10080-10100],10,1077.0,72.0,140.0,107.7,114.0,23.204,-0.356,-0.901
	6,[192.168.224.80-1.193.81.212-TCP-56993-4466],205,22469.0,54.0,1494.0,109.605,66.0,164.787,4.535,26.121
 */

/*
 * Reads the output of the type3 job from dataoutput/type3/type3A/ (format shown above)
 * and writes to dataoutput/type2/.
 * For each source IP, counts the number of its TCP, UDP, HTTP and HTTPS flows.
 */
public class ProcessType2 {
	

	public static class MapClass extends Mapper<LongWritable, Text, Text, Text> {
		
		
		@Override
		protected void setup(Mapper<LongWritable, Text, Text, Text>.Context context)
				throws IOException, InterruptedException {
			// TODO Auto-generated method stub
			context.nextKeyValue();
		}

		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)
				throws IOException, InterruptedException {
			
			String[] flowInfo = value.toString().split(",");
			String[] flow = flowInfo[1].split("~");
			String src = flow[0].replace("[", "");
			String pro = flow[2];
			
			String out = pro; 
			context.write(new Text(src), new Text(out));
			
		}
			

			// TODO Auto-generated method stub
			
	}
	/*
	 * 对flow处理，没有注释
	 */
	public static class ReduceClass extends Reducer<Text, Text, NullWritable, Text> {
		int ipNo = 0;

		public Map<String, Integer> proMapDebug = new HashMap<String, Integer>();
		public List<String> proList = new ArrayList<String>();
		
		ProFilter proFilter;
		Set<String> tcp;
		Set<String> http;
		Set<String> https;
		Set<String> udp;
		@Override
		protected void setup(Reducer<Text, Text, NullWritable, Text>.Context context)
				throws IOException, InterruptedException {
			// TODO Auto-generated method stub
			String out = "Ip no.,Src Ip,Http,Https,Tcp,Udp\r";
			context.write(NullWritable.get(), new Text(out));
			
			proFilter = new ProFilter();
			tcp = proFilter.getTcp();
			http = proFilter.getHttp();
			https = proFilter.getHttps();
			udp = proFilter.getUdp();
			
		}
		
		@Override
		protected void reduce(Text arg0, Iterable<Text> arg1,
				Reducer<Text, Text, NullWritable, Text>.Context arg2)
						throws IOException, InterruptedException {
			//Map<String, Integer> proMapDebug = proMapDebug;
			ipNo ++;
			int httpNum = 0;
			int httpsNum = 0;
			int tcpNum = 0;
			int udpNum = 0;
			for (Text text : arg1) {
				String pro = text.toString().toLowerCase();
				if (http.contains(pro)) {
					httpNum++;
				} else if(https.contains(pro)) {
					httpsNum++;
				} else if(tcp.contains(pro)) {
					tcpNum ++;
				} else if (udp.contains(pro)) {
					udpNum++;
				}
			}
			
			/*String out = "flowNo:" + flowNum + ",packetNum:" + packetNum 
					+  ",flowBytes:" + flowBytes + "\r";*/
			//"ipNo.,ip,http,https,tcp,udp";
			String out  = ipNo + "," + arg0.toString() + "," + httpNum + 
					"," + httpsNum + "," + tcpNum + "," + udpNum + "\r";
			
			arg2.write(NullWritable.get(), new Text(out));
		}
		
	}
	
	
	
	
	
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		String master = args[0];
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://" + master + ":9000");
		
		//conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", ",");
		HDFSUtil hdfsUtil = new HDFSUtil(conf);
		Job job = Job.getInstance(conf, "process");
		
		job.setJarByClass(ProcessType2.class);
		
		job.setMapperClass(MapClass.class);
		job.setReducerClass(ReduceClass.class);
		
		job.setInputFormatClass(TextInputFormat.class);
		
		job.setOutputFormatClass(TextOutputFormat.class);
		
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		Path in  = new Path("hdfs://" + master + ":9000/dataoutput/type3/type3A/");
		Path out = new Path("hdfs://" + master + ":9000/dataoutput/type2/");
		hdfsUtil.delete(out);
		
		FileInputFormat.addInputPath(job, in);
		FileOutputFormat.setOutputPath(job, out);
		System.out.println(job.waitForCompletion(true) ? 0 : 1);
		Path out1 = new Path("hdfs://" + master + ":9000/dataoutput/type2/_SUCCESS");
		hdfsUtil.delete(out1);

	}

}
