package product;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.TreeMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class top3 {

	/**
	 * Driver: chains two MapReduce jobs.
	 * Job 1 (args[0] -> args[1]): groups product types by province and counts
	 * the distinct types per province.
	 * Job 2 (args[1] -> args[2]): orders provinces by that count (descending)
	 * and intersects the product lists of the top 3.
	 *
	 * @param args [0] input path, [1] intermediate path, [2] final output path
	 */
	public static void main(String[] args) throws Exception {
		if (args.length != 3) {
			System.err.println("path err!");
			System.exit(1); // non-zero exit: bad arguments (original exited with 0)
		}
		// Job.getInstance replaces the deprecated new Job(Configuration, String).
		Job job1 = Job.getInstance(new Configuration(), "ProductDemo");
		job1.setJarByClass(top3.class);

		FileInputFormat.setInputPaths(job1, new Path(args[0]));
		FileOutputFormat.setOutputPath(job1, new Path(args[1]));

		job1.setMapperClass(stmaps.class);
		job1.setReducerClass(streduce.class);

		job1.setOutputKeyClass(Text.class);
		job1.setOutputValueClass(Text.class);

		// Block until job 1 finishes; abort the chain if it failed (the original
		// ignored the result and ran job 2 against possibly missing output).
		if (!job1.waitForCompletion(true)) {
			System.exit(1);
		}

		Job job2 = Job.getInstance(new Configuration(), "ProductDemo");
		job2.setJarByClass(top3.class);

		FileInputFormat.setInputPaths(job2, new Path(args[1]));
		FileOutputFormat.setOutputPath(job2, new Path(args[2]));

		job2.setMapperClass(sortmaps.class);
		job2.setReducerClass(sortreduce.class);
		// Map output types differ from the reduce output types here,
		// so both map-output classes must be set explicitly.
		job2.setMapOutputKeyClass(IntWritable.class);
		job2.setMapOutputValueClass(Text.class);

		job2.setOutputKeyClass(Text.class);
		job2.setOutputValueClass(Text.class);

		// Propagate the final job's success/failure as the process exit code.
		System.exit(job2.waitForCompletion(true) ? 0 : 1);
	}
	public static class stmaps extends Mapper<LongWritable,Text,Text,Text>{
		protected void map(LongWritable key, Text value, org.apache.hadoop.mapreduce.Mapper<LongWritable,Text,Text,Text>.Context context) throws java.io.IOException ,InterruptedException {
			String[] lines = value.toString().split("\t");
			if (lines.length==6) {
				String type = lines[0].trim();
				String provice = lines[4].trim();
				context.write(new Text(provice), new Text(type));
			}
		};
	}
	public static class streduce extends Reducer<Text,Text,Text,Text>{
		HashSet<String> sets=new HashSet<>();
		protected void reduce(Text k2, java.lang.Iterable<Text> value, org.apache.hadoop.mapreduce.Reducer<Text,Text,Text,Text>.Context context) throws java.io.IOException ,InterruptedException {
			for (Text typeinfo : value) {
				sets.add(typeinfo.toString().trim()+"\t");
			}
			String infosString=k2.toString()+"\t"+sets.toString();
			context.write(new Text(sets.size()+"\t"), new Text(infosString));
		};
	}
	public static class sortmaps extends Mapper<LongWritable, Text, IntWritable, Text>{
		protected void map(LongWritable key, Text value, org.apache.hadoop.mapreduce.Mapper<LongWritable,Text,IntWritable,Text>.Context context) throws java.io.IOException ,InterruptedException {
			String[] lines = value.toString().split("\t");
			if(lines.length==3){
				//map输出中间结果:<3,青海\t玉米,粳米,面粉>
				context.write(new IntWritable(Integer.parseInt(lines[1])),
                            new Text(lines[0].trim()+"\t"+lines[2].trim()));
			}
		};
	}
	public static class sortreduce extends Reducer<IntWritable,Text, Text, Text>{
		private static TreeMap<Integer, String> tm=new TreeMap<Integer,String>(
				new Comparator<Integer>() {
					//通过重写compare方法，实现逆序排列
					@Override
					public int compare(Integer o1, Integer o2) {
						// TODO Auto-generated method stub
						return o2.compareTo(o1);
					}
				});
		protected void reduce(IntWritable k2, java.lang.Iterable<Text> values, org.apache.hadoop.mapreduce.Reducer<IntWritable,Text,Text,Text>.Context arg2) throws java.io.IOException ,InterruptedException {
			ArrayList<String> list = new ArrayList<String>();
			for (Text value : values) {
				//value的内容：<省份\t多个农产品的字符串>，例如：山东\t玉米，面粉，大白菜.....
				list.add(value.toString());
			}
			tm.put(k2.get(), list.toString());
			if(tm.size()>3){
				tm.remove(tm.lastKey());
			}
		};
		protected void cleanup(org.apache.hadoop.mapreduce.Reducer<IntWritable,Text,Text,Text>.Context context) throws java.io.IOException ,InterruptedException {
			Iterator<Integer> iterator = tm.keySet().iterator();
			String []type1=null;
			String []type2=null;
			String []type3=null;
			int count=0;
			while (iterator.hasNext()) {
				count++;
				if(count==1){
					//山东\t玉米，面粉，大白菜....
					type1=tm.get(iterator.next()).split("\t")[1].split(",");
				}else if(count==2){
					type2=tm.get(iterator.next()).split("\t")[1].split(",");
				}else if(count==3){	
					type3=tm.get(iterator.next()).split("\t")[1].split(",");
				}
			}
			ArrayList<String> arrayList = new ArrayList<String>();
			for (int i = 0; i < type1.length; i++) {
				for (int j = 0; j < type2.length; j++) {
					for (int j2 = 0; j2 < type3.length; j2++) {
						if(type1[i].equals(type2[j])&&type2[j].equals(type3[j2])){
							arrayList.add(type1[i]);
						}
					}
				}
			}
			context.write(new Text(arrayList.toString()), new Text(""));
		};
	}
	

}
