package com.ls.bigdata.hadoop.kpi;

import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;



/**
 * 分类ip地址  统计ip
 * @author zhaot
 *
 */
public class KPIIP {
	//通过内部类  对ip进行map 运算
	/**
	 * Map stage: parses one raw access-log line per call and, for every
	 * valid request, emits (request, remote address) so the reducer can
	 * count the distinct IPs per request.
	 */
	public static class KPIIPMapper extends MapReduceBase implements
	Mapper<Object, Text, Text, Text>{
		// Output key/value objects are reused across calls to avoid
		// per-record allocation (standard Hadoop idiom).
		private final Text requestKey = new Text();
		private final Text remoteAddr = new Text();

		@Override
		public void map(Object key, Text value, OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
			// Parse the raw log line into a KPI record grouped by IP.
			KPI kpi = KPI.filterIPs(value.toString());
			// Guard clause: drop malformed / invalid log entries.
			if (!kpi.isValid()) {
				return;
			}
			requestKey.set(kpi.getRequest());
			remoteAddr.set(kpi.getRemote_addr());
			output.collect(requestKey, remoteAddr);
		}

	}



	/**
	 * 通过内部   IP进行分析    reduce阶段
	 * @author zhaot
	 * map（k  ,v）
	 */
	/**
	 * Reduce stage: for each request key, collects the values (IP address
	 * strings) into a set and emits the number of distinct IPs.
	 */
	public static class KPIIPReducer extends MapReduceBase implements
	Reducer<Text, Text, Text, Text>{
		private final Text result = new Text();
		// Reused across keys; MUST be cleared on every reduce() call because
		// the old mapred API reuses one Reducer instance for many keys.
		private final Set<String> count = new HashSet<String>();

		@Override
		public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter)
				throws IOException {
			// BUG FIX: discard state left over from the previous key.
			// Without this, IPs accumulate across keys and every emitted
			// count is inflated by all earlier keys' distinct IPs.
			count.clear();
			// De-duplicate the IPs for this key via the set.
			while (values.hasNext()) {
				count.add(values.next().toString());
			}
			// Emit (request, number-of-distinct-IPs).
			result.set(String.valueOf(count.size()));
			output.collect(key, result);
		}

	}


	/**
	 * Job driver: configures and submits the KPIIP job.
	 * Input/output paths default to the original hard-coded HDFS locations
	 * but may be overridden on the command line (args[0] = input,
	 * args[1] = output). The output path must NOT already exist on HDFS.
	 *
	 * @param args optional [inputPath, outputPath] overrides
	 * @throws IOException if job submission or execution fails
	 */
	public static void main(String[] args) throws IOException {
		String input = args.length > 0 ? args[0] : "hdfs://192.168.151.132:9000/xm/access_log";
		// This path must not exist on HDFS before the job runs.
		String output = args.length > 1 ? args[1] : "hdfs://192.168.151.132:9000/ip";

		JobConf conf = new JobConf(KPIIP.class);
		conf.setJobName("KPIIP");
		conf.setMapOutputKeyClass(Text.class);
		conf.setMapOutputValueClass(Text.class);

		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(Text.class);

		conf.setMapperClass(KPIIPMapper.class);
		// BUG FIX: removed conf.setCombinerClass(KPIIPReducer.class).
		// The reducer emits a COUNT (set size) as its value, so running it
		// as a map-side combiner would feed counts — not IP addresses —
		// into the final reduce, which would then count distinct partial
		// counts instead of distinct IPs.
		conf.setReducerClass(KPIIPReducer.class);

		conf.setInputFormat(TextInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);

		FileInputFormat.setInputPaths(conf, new Path(input));
		FileOutputFormat.setOutputPath(conf, new Path(output));

		// Submit and block until the job completes.
		JobClient.runJob(conf);
		System.exit(0);
	}
}
