// Example of MapReduce partitioning and sorting.
package com.servlet;
import java.io.*;
import java.net.URI;
import java.util.*;
import javax.servlet.*;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.*;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import com.beans.*;
import com.constant.Constant;
import com.dao.impl.HdfsDaoImpl;
import com.servlet.MR_PartSortServlet.TotalMapper.*;

@WebServlet("/MR_PartSortServlet")
public class MR_PartSortServlet extends HttpServlet {

	/**
	 * Runs a two-stage MapReduce pipeline over the user's score file:
	 * stage 1 (TotalMapper/TotalReduce) aggregates scores per id card;
	 * stage 2 (SortMapper + MyPartition) re-sorts the aggregated records and
	 * splits them into five partitions by address. The five partition files
	 * are then read back from HDFS and forwarded to the result JSP.
	 *
	 * @param request  expects parameter "filePath" (input file under the HDFS
	 *                 root) and session attribute "session_user"
	 * @param response forwarded to /mapreduce/partsort-result.jsp
	 * @throws ServletException if either job fails or any unexpected error occurs
	 * @throws IOException      on HDFS or dispatch I/O errors
	 */
	protected void service(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		try {
			UserInfo user = (UserInfo) request.getSession().getAttribute("session_user");
			String userRoot = user.getUserName();

			// Stage 1: aggregate scores per id card.
			Job job = Job.getInstance();
			job.setMapperClass(TotalMapper.class);
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(ScoreInfo.class);

			// Input file chosen by the user, relative to the HDFS root.
			String filePath = request.getParameter("filePath");
			FileInputFormat.setInputPaths(job, new Path(Constant.HDFS_PATH + filePath));

			job.setReducerClass(TotalReduce.class);
			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(ScoreInfo.class);

			// Intermediate output of stage 1; MapReduce fails if it already exists.
			Path savePath = new Path(Constant.HDFS_PATH + userRoot + "/part_sort_tmp");

			URI uri = new URI(Constant.HDFS_PATH);
			FileSystem fs = FileSystem.get(uri, Constant.CONF, "root");
			try {
				fs.delete(savePath, true);
				FileOutputFormat.setOutputPath(job, savePath);

				// Fail fast instead of reading output of a failed job.
				if (!job.waitForCompletion(true)) {
					throw new ServletException("Score aggregation job failed");
				}

				// Stage 2: sort the aggregated records and partition by address.
				Job job2 = Job.getInstance();
				job2.setMapperClass(SortMapper.class);
				job2.setMapOutputKeyClass(ScoreInfo.class);
				job2.setMapOutputValueClass(Text.class);

				FileInputFormat.setInputPaths(job2, savePath + "/part-r-00000");

				Path resultPath = new Path(Constant.HDFS_PATH + userRoot + "/part_sort_result");
				fs.delete(resultPath, true);
				FileOutputFormat.setOutputPath(job2, resultPath);

				// Five reducers, one per address bucket (must match MyPartition).
				job2.setNumReduceTasks(5);
				job2.setPartitionerClass(MyPartition.class);

				if (!job2.waitForCompletion(true)) {
					throw new ServletException("Sort/partition job failed");
				}

				// Read every partition file back; each file becomes one sub-list.
				List<List<ScoreInfo>> dataList = new ArrayList<>();
				for (int i = 0; i < 5; i++) {
					Path path = new Path(Constant.HDFS_PATH + userRoot + "/part_sort_result/part-r-0000" + i);
					List<ScoreInfo> scoreInfoList = new ArrayList<>();

					// try-with-resources: streams are closed even if parsing throws
					// (the original leaked them on any exception in the loop).
					try (FSDataInputStream fsInput = fs.open(path);
							BufferedReader br = new BufferedReader(new InputStreamReader(fsInput, "UTF-8"))) {
						String str;
						while ((str = br.readLine()) != null) {
							// Line layout: address \t name \t score \t gender \t idCard
							String[] data = str.split("\t");

							String address = data[0];
							String name = data[1];
							Integer score = Integer.parseInt(data[2]);
							String gender = data[3];
							String idCard = data[4];

							// The constructor populates every field; the original
							// redundantly re-set each one afterwards.
							scoreInfoList.add(new ScoreInfo(idCard, address, name, score, gender));
						}
					}
					dataList.add(scoreInfoList);
				}

				request.setAttribute("dataList", dataList);
			} finally {
				fs.close();
			}

			request.getRequestDispatcher("/mapreduce/partsort-result.jsp").forward(request, response);
		} catch (ServletException | IOException e) {
			throw e;
		} catch (Exception e) {
			// Preserve the cause instead of swallowing it: the original printed
			// the stack trace and returned a blank page to the client.
			throw new ServletException(e);
		}
	}

	public static class TotalMapper extends Mapper<LongWritable, Text, Text, ScoreInfo> {
		ScoreInfo info = new ScoreInfo();
		Text k2 = new Text();

		/**
		 * Parses one tab-separated input line
		 * (idCard \t address \t name \t score \t gender) and emits
		 * (idCard, ScoreInfo) so the reducer can aggregate per id card.
		 * The Writable fields are reused across calls — the standard Hadoop
		 * pattern, since context.write serializes them immediately.
		 */
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, ScoreInfo>.Context context)
				throws IOException, InterruptedException {

			String[] fields = value.toString().split("\t");

			info.setIdCard(fields[0]);
			info.setAddress(fields[1]);
			info.setName(fields[2]);
			info.setScore(Integer.parseInt(fields[3]));
			info.setGender(fields[4]);

			k2.set(fields[0]);
			context.write(k2, info);
		}
		
		
	 public static class TotalReduce extends Reducer<Text, ScoreInfo, Text, ScoreInfo> {
			int totalScore = 0;
			ScoreInfo info;

			protected void reduce(Text key, Iterable<ScoreInfo> values, Context context) throws IOException, InterruptedException {

				int i = 0;
				for (ScoreInfo o : values) {
					// 因为 其他信息也要输出去，这里假设id相同的用户，其他信息完全相同
					if (i == 0) {
						info = o;
					}
					totalScore += o.getScore();
					i++;
				}

				info.setScore(totalScore);

				context.write(key, info);
				
				totalScore=0;
			}
		}
		
		
		public static class SortMapper extends Mapper<LongWritable, Text, ScoreInfo, Text> {
			ScoreInfo info = new ScoreInfo();
			Text k2 = new Text();

			protected void map(LongWritable key, Text value, Context context)
					throws IOException, InterruptedException {

				String line = value.toString();
				String[] data = line.split("\t");

				info.setIdCard(data[0]);
				info.setAddress(data[1]);
				info.setName(data[2]);
				info.setScore(Integer.parseInt(data[3].trim()));
				info.setGender(data[4]);

				k2.set(info.getIdCard());
				
				context.write(info,k2 );	
			}
		}
		
		// Custom partitioner: routes records to reduce tasks by address.
		public static class MyPartition extends Partitioner<ScoreInfo,Text>{
			public int getPartition(ScoreInfo key , Text value , int numPartitions) {
				String addr=key.getAddress();
				
				if(addr.equals("北京")) {
					return 0;
				}
				if(addr.equals("上海")) {
					return 1;
				}
				if(addr.equals("广州")){
					return 2;
				}
				if(addr.equals("深圳")) {
					return 3;
				}
				else {
					return 4;
				}	
			}	
		}
	}
}