package com.zhiyou.bd14;

import java.io.IOException;

import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyInputFormat;
import org.apache.avro.mapreduce.AvroKeyOutputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Computes, per province, how many times users accessed (logged in to) the system
public class ReadAvroInput {
		public static class ReadAvroInputMap extends Mapper<AvroKey<UserActionLog>, NullWritable, Text, IntWritable>{
			// Writables are reused across map() calls to avoid per-record allocation
			// (standard Hadoop pattern; the framework serializes them on write).
			private final IntWritable ONE = new IntWritable(1);
			private final Text outKey = new Text();

			/**
			 * Emits (province, 1) for every record whose action type is "login".
			 *
			 * @param key     AvroKey wrapper around one UserActionLog record
			 * @param value   always NullWritable for AvroKeyInputFormat
			 * @param context sink for the (province, 1) pairs
			 */
			@Override
			protected void map(AvroKey<UserActionLog> key, NullWritable value,
					Mapper<AvroKey<UserActionLog>, NullWritable, Text, IntWritable>.Context context)
					throws IOException, InterruptedException {
				// Unwrap the UserActionLog record from its AvroKey envelope.
				UserActionLog record = key.datum();
				// Avro-generated getters may return org.apache.avro.util.Utf8 rather than
				// String; Utf8.equals("login") is always false, which would silently drop
				// every record. contentEquals() compares character content regardless of
				// the concrete CharSequence type. Null-guard in case the field is optional.
				CharSequence actionType = record.getActionType();
				if (actionType != null && "login".contentEquals(actionType)) {
					outKey.set(record.getProvience().toString());
					context.write(outKey, ONE);
				}
			}
		}
		public static class ReadAvroInputReduce extends Reducer<Text, IntWritable, Text, IntWritable>{
			// Reusable output value; safe to share across reduce() calls because the
			// framework serializes it immediately on context.write().
			private final IntWritable result = new IntWritable();

			/**
			 * Sums the per-province counts produced by the mapper and writes one
			 * (province, total) pair per key.
			 *
			 * @param key     province name
			 * @param values  the 1s emitted for each login from that province
			 * @param context sink for the aggregated count
			 */
			@Override
			protected void reduce(Text key, Iterable<IntWritable> values,
					Reducer<Text, IntWritable, Text, IntWritable>.Context context)
					throws IOException, InterruptedException {
				int total = 0;
				for (IntWritable count : values) {
					total += count.get();
				}
				result.set(total);
				context.write(key, result);
			}
		}
		/**
		 * Configures and submits the job: reads Avro container files and writes
		 * plain-text (province, accessCount) pairs.
		 *
		 * Usage: ReadAvroInput [inputPath] [outputPath]
		 * Both arguments are optional; the original hard-coded HDFS paths are used
		 * as defaults, so existing invocations keep working unchanged.
		 */
		public static void main(String[] args) throws Exception {
			Configuration conf = new Configuration();
			Job job = Job.getInstance(conf);
			job.setJarByClass(ReadAvroInput.class);
			job.setJobName("AvroKey");

			job.setMapperClass(ReadAvroInputMap.class);
			// The reduce is an associative, commutative sum, so it doubles as a
			// combiner to shrink the shuffle without changing the final output.
			job.setCombinerClass(ReadAvroInputReduce.class);
			job.setReducerClass(ReadAvroInputReduce.class);

			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(IntWritable.class);
			// Input format: Avro container files, each record wrapped in an AvroKey.
			job.setInputFormatClass(AvroKeyInputFormat.class);
			// Reader schema for deserializing the Avro input records.
			AvroJob.setInputKeySchema(job, UserActionLog.getClassSchema());

			// Allow paths to be overridden from the command line; fall back to the
			// original defaults for backward compatibility.
			Path inPath = new Path(args.length > 0 ? args[0] : "/bd14/output/ReduceJoin");
			Path outPath = new Path(args.length > 1 ? args[1] : "/bd14/output/avroinput");
			FileInputFormat.addInputPath(job, inPath);
			// Remove stale output so the job does not fail with "output dir exists";
			// delete() returns false harmlessly if the path is absent.
			outPath.getFileSystem(conf).delete(outPath, true);
			FileOutputFormat.setOutputPath(job, outPath);
			System.exit(job.waitForCompletion(true) ? 0 : 1);
		}
}
