package com.ls.bigdata.hadoop.iplocation;

import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;

import com.ls.bigdata.hadoop.kpi.KPI;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;



/**
 * Counts requests per IP address and resolves each IP to a geographic
 * location, then reports each location's share of total traffic.
 *
 * @author zhaot
 */
public class KPIAddress  {
	// Running total of requests accumulated by KPIAddressMapper2 and read by
	// KPIAddressReducer2 to compute percentages.
	// NOTE(review): a static field is only shared within a single JVM; in a
	// truly distributed run each task gets its own copy, so the percentages
	// are only correct in local/single-JVM mode — confirm before deploying.
	public static int  ipnum=0;

	/**
	 * First-pass mapper: extracts the remote IP address from each raw log
	 * line and emits (ip, 1) for every record KPI considers valid.
	 *
	 * @author zhaot
	 */
	public  static  class KPIAddressMapper extends MapReduceBase implements Mapper<Object, Text, Text, IntWritable>{

		private final IntWritable one = new IntWritable(1); // reusable constant count
		private final Text ips = new Text();                // reusable output key

		@Override
		public void map(Object key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter)
				throws IOException {
			// Parse the raw log line; KPI.filterIPs flags invalid records.
			KPI kpi = KPI.filterIPs(value.toString());
			if (kpi.isValid()) {
				ips.set(kpi.getRemote_addr());
				output.collect(ips, one);
			}
		}
	}

	/**
	 * First-pass reducer: sums the hit count per IP, resolves the IP to a
	 * location string via the IPSeeker database, and emits
	 * (ip, "sum+address"). The "+" separator is parsed by the second job.
	 *
	 * @author zhaot
	 */
	public static  class  KPIAddressReducer extends MapReduceBase implements Reducer<Text, IntWritable,Text, Text>{
		// Shared IP-to-location lookup database (singleton per task JVM).
		private final IPSeeker ipSeeker = IPSeeker.getInstance();

		@Override
		public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, Text> output,
				Reporter reporter) throws IOException {
			String address = ipSeeker.getAddress(key.toString());
			int sum = 0;
			while (values.hasNext()) {
				sum += values.next().get();
			}
			// Format: "<count>+<address>", consumed by KPIAddressMapper2.
			output.collect(key, new Text(sum + "+" + address));
		}
	}

	/**
	 * Second-pass mapper: parses the "sum+address" value produced by the
	 * first job, accumulates the global request total in {@link #ipnum},
	 * and emits (address, sum).
	 *
	 * @author zhaot
	 */
	public  static  class KPIAddressMapper2 extends MapReduceBase  implements Mapper<Object, Text,Text, IntWritable>{

		@Override
		public void map(Object key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter)
				throws IOException {
			String line = value.toString();
			// Scan whitespace-separated tokens for the "count+address" field.
			StringTokenizer tokenizer = new StringTokenizer(line);
			String str = "";
			while (tokenizer.hasMoreTokens()) {
				String token = tokenizer.nextToken();
				if (token.contains("+")) {
					str = token;
					break;
				}
			}
			int seq = str.indexOf("+");
			// Bug fix: the original threw StringIndexOutOfBoundsException when
			// no token contained "+" (seq == -1), and NumberFormatException
			// when "+" was the first character (empty count). Skip such records.
			if (seq <= 0) {
				return;
			}
			int num = Integer.parseInt(str.substring(0, seq));
			// Everything after the "+" is the resolved location string.
			String address = str.substring(seq + 1);
			ipnum += num;
			output.collect(new Text(address), new IntWritable(num));
		}
	}

	/**
	 * Second-pass reducer: sums hits per location and emits the location's
	 * percentage of the overall request total ({@link #ipnum}).
	 *
	 * @author zhaot
	 */
	public  static  class  KPIAddressReducer2 extends MapReduceBase implements Reducer<Text, IntWritable, Text, Text>{

		@Override
		public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, Text> output, Reporter reporter)
				throws IOException {
			int sum = 0;
			while (values.hasNext()) {
				sum += values.next().get();
			}
			// Guard against division by zero (original would emit "Infinity%").
			float percent = (ipnum == 0) ? 0f : (float) sum / (float) ipnum * 100;
			// Bug fix: the original called the static String.valueOf through a
			// toString() instance reference; plain concatenation is equivalent
			// and produces the identical float formatting.
			output.collect(key, new Text("  " + percent + "%"));
		}
	}

	/**
	 * Chains the two jobs: job 1 reads the raw access log and writes
	 * intermediate "ip -> sum+address" records to tempDir; job 2 reads
	 * tempDir and writes the final "address -> percent" output.
	 *
	 * @param args unused — input/output paths are hard-coded HDFS URIs
	 * @throws IOException if either job fails to run
	 */
	public static void main(String[] args) throws IOException {
		// Raw access-log input on HDFS.
		String input = "hdfs://192.168.151.132:9000/xm/access_log";
		// Final output location on HDFS.
		String output = "hdfs://192.168.151.132:9000/hadoop/output";
		// Intermediate directory shared between the two jobs.
		Path tempDir = new Path("hdfs://192.168.151.132:9000/y2");

		// --- Job 1: per-IP count + location lookup ---
		JobConf conf = new JobConf(KPIAddress.class);
		conf.setJobName("KPIAddress");

		conf.setMapOutputKeyClass(Text.class);
		conf.setMapOutputValueClass(IntWritable.class);

		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(Text.class);

		conf.setMapperClass(KPIAddressMapper.class);
		conf.setReducerClass(KPIAddressReducer.class);

		conf.setInputFormat(TextInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);

		FileInputFormat.setInputPaths(conf, new Path(input));
		FileOutputFormat.setOutputPath(conf, tempDir);

		JobClient.runJob(conf);

		// --- Job 2: per-location percentage of total traffic ---
		JobConf conf2 = new JobConf(KPIAddress.class);
		conf2.setJobName("KPIAddress2");

		conf2.setMapOutputKeyClass(Text.class);
		conf2.setMapOutputValueClass(IntWritable.class);

		conf2.setOutputKeyClass(Text.class);
		conf2.setOutputValueClass(Text.class);

		conf2.setMapperClass(KPIAddressMapper2.class);
		conf2.setReducerClass(KPIAddressReducer2.class);

		conf2.setInputFormat(TextInputFormat.class);
		conf2.setOutputFormat(TextOutputFormat.class);

		FileInputFormat.setInputPaths(conf2, tempDir);
		FileOutputFormat.setOutputPath(conf2, new Path(output));

		JobClient.runJob(conf2);
		System.exit(0);
	}
}
