package MapReduce;

import java.io.IOException;

import java.util.Iterator;
import java.util.StringTokenizer;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;



// NOTE: if the jars under /usr/local/hadoop/share/hadoop/yarn are on the
// classpath, this job can be run directly and will produce the output folder.
public class AverageScore {
	public static class Map extends
			Mapper<LongWritable, Text, Text, IntWritable> {
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// 每行按空格切片
			StringTokenizer tokenizer = new StringTokenizer(value.toString());
			while (tokenizer.hasMoreElements()) {
				// 获取学号
				String stuId = tokenizer.nextToken();
				String str = tokenizer.nextToken();
				// 获取成绩
				Integer stuScore = Integer.parseInt(str);
				Text sId = new Text(stuId);
				// 学号为key，各科成绩为value-list
				context.write(sId, new IntWritable(stuScore));

			}

		}
	}

	public static class Reduce extends
			Reducer<Text, IntWritable, Text, FloatWritable> {
		public void reduce(Text key, Iterable<IntWritable> values,
				Context context) throws IOException, InterruptedException {
			// 记录总成绩
			float sumScore = 0;
			// 记录科目数量
			int coursesCount = 0;
			// 记录平均分
			float avgerage = 0;
			// 迭代value-list
			Iterator<IntWritable> iterator = values.iterator();

			while (iterator.hasNext()) {
				// 每科分数累加
				sumScore = iterator.next().get() + sumScore;
				// 科目数计数
				coursesCount++;
			}
			// 求均分
			//avgerage = sumScore / coursesCount;
			// 学号为key，均分为value
			//context.write(key, new FloatWritable(avgerage));

			// 学号为key，总分为value
			context.write(key, new FloatWritable(sumScore));
		}
	}
	
	
	public static boolean isNum(String inp) {
		Pattern pattern = Pattern.compile("^-?\\d+(\\.\\d+)?$");
		return pattern.matcher(inp).matches();
	}

	public static void main(String[] args) throws IOException,
			ClassNotFoundException, InterruptedException {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://localhost:9000");
		String[] otherArgs = new String[] { "inp1", "out1" };

		FileSystem fs = FileSystem.get(conf);
		String output = "/user/hadoop/out1";
		Path outputPath = new Path(output);
		// 每次运行删除上一个out文件夹
		if (fs.exists(outputPath)) {
			fs.delete(outputPath, true);
		}

		if (otherArgs.length != 2) {
			System.err.println("Usage:Merge and duplicate removal<in><out>");
			System.exit(2);
		}
		Job job = Job.getInstance(conf, "平均成绩");
		job.setJarByClass(AverageScore.class);
		job.setMapperClass(Map.class);
		job.setReducerClass(Reduce.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);
		FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}