package com.zhiyou.bd14;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyOutputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;


public class ReduceJoin {
	/**
	 * Hadoop {@link Writable} carrying one join-side record: the payload
	 * columns plus a flag naming which input file the record came from,
	 * so the reducer can tell the two join sides apart.
	 */
	public static class ValuewithFlag implements Writable{
		// Tab-joined payload columns for one input record.
		private String value;
		// Source tag ("userLogsLarge" or "userInfo" as set by the mapper).
		private String flag;

		@Override
		public void write(DataOutput out) throws IOException {
			// Field order must mirror readFields exactly.
			out.writeUTF(value);
			out.writeUTF(flag);
		}

		@Override
		public void readFields(DataInput in) throws IOException {
			// Deserialize in the same order write() serialized.
			value = in.readUTF();
			flag = in.readUTF();
		}

		public String getValue() { return value; }

		public void setValue(String value) { this.value = value; }

		public String getFlag() { return flag; }

		public void setFlag(String flag) { this.flag = flag; }
	}
	/**
	 * Map side of the reduce-side join. Tags each input record with the
	 * name of the file it came from and emits (userId, taggedValue) so the
	 * reducer can group both join sides under the same key.
	 */
	public static class PeduceJoinMap extends Mapper<LongWritable, Text, Text, ValuewithFlag>{
		private FileSplit inputSplit;
		// Tag for the current split's source file; null if unrecognized.
		private String filename;
		private String[] infos;
		// Reused output objects to avoid per-record allocation.
		private Text oKey =  new Text();
		private ValuewithFlag oValue = new ValuewithFlag();

		@Override
		protected void setup(Mapper<LongWritable, Text, Text, ValuewithFlag>.Context context)
				throws IOException, InterruptedException {
			// Identify the join side once per split from the input file name.
			inputSplit = (FileSplit)context.getInputSplit();
			if(inputSplit.getPath().toString().contains("user-logs-large.txt")){
				filename = "userLogsLarge";
			}else if(inputSplit.getPath().toString().contains("user_info.txt")) {
				filename = "userInfo";
			}
		}

		@Override
		protected void map(LongWritable key, Text value,
				Mapper<LongWritable, Text, Text, ValuewithFlag>.Context context)
				throws IOException, InterruptedException {
			// BUGFIX: previously filename.equals(...) threw an NPE when the
			// split matched neither known file, and when no branch matched the
			// stale key/value of the previous record was still written out.
			if(filename == null){
				return;
			}
			infos = value.toString().split("\\s");
			// Both sources have the same shape: key column plus two payload
			// columns; skip malformed lines instead of throwing AIOOBE.
			if(infos.length < 3){
				return;
			}
			oValue.setFlag(filename);
			oKey.set(infos[0]);
			oValue.setValue(infos[1]+"\t"+infos[2]);
			context.write(oKey, oValue);
		}
	}
	/**
	 * Reduce side of the join. For each user key, partitions the tagged
	 * values into the log side and the info side, then emits the cross
	 * product of the two lists as Avro {@code UserActionLog} records
	 * (i.e. an inner join on the user key).
	 */
	public static class PeduceJoinReduce extends Reducer<Text, ValuewithFlag, AvroKey<UserActionLog>, NullWritable>{
		// Reused output wrappers; NullWritable is a singleton.
		private NullWritable oValue = NullWritable.get();
		private AvroKey<UserActionLog> oKey = new AvroKey<UserActionLog>();

		@Override
		protected void reduce(Text key, Iterable<ValuewithFlag> values,
				Reducer<Text, ValuewithFlag, AvroKey<UserActionLog>, NullWritable>.Context context) throws IOException, InterruptedException {
			// Split the incoming tagged values into the two join sides.
			List<String> logSide = new ArrayList<String>();
			List<String> infoSide = new ArrayList<String>();
			for (ValuewithFlag tagged : values) {
				if ("userLogsLarge".equals(tagged.getFlag())) {
					logSide.add(tagged.getValue());
				} else if ("userInfo".equals(tagged.getFlag())) {
					infoSide.add(tagged.getValue());
				}
			}
			// Emit one joined record per (log, info) pair.
			for (String logRecord : logSide) {
				// Columns: actionType, ipAddress (as emitted by the mapper).
				String[] logCols = logRecord.split("\\s");
				for (String infoRecord : infoSide) {
					// Columns: gender ("man"/other), province.
					String[] infoCols = infoRecord.split("\\s");
					UserActionLog.Builder builder = UserActionLog.newBuilder();
					builder.setActionType(logCols[0]);
					builder.setIpAddres(logCols[1]);
					builder.setGender("man".equals(infoCols[0]) ? 1 : 0);
					builder.setProvience(infoCols[1]);
					builder.setUserName(key.toString());
					oKey.datum(builder.build());
					context.write(oKey, oValue);
				}
			}
		}
	}
	/**
	 * Configures and runs the reduce-side join job: two text inputs are
	 * mapped to tagged values, joined in the reducer, and written out as
	 * Avro {@code UserActionLog} records.
	 *
	 * Exits with status 0 on success, 1 on failure.
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf);
		job.setJarByClass(ReduceJoin.class);
		job.setJobName("redece关联");

		job.setMapperClass(PeduceJoinMap.class);
		job.setReducerClass(PeduceJoinReduce.class);

		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(ValuewithFlag.class);
		job.setOutputValueClass(NullWritable.class);

		// AvroJob.setOutputKeySchema also registers AvroKey as the output key
		// class, so an explicit setOutputKeyClass(AvroKey.class) is redundant.
		job.setOutputFormatClass(AvroKeyOutputFormat.class);
		AvroJob.setOutputKeySchema(job, UserActionLog.SCHEMA$);

		FileInputFormat.addInputPath(job,new Path("/tongji/user-logs-large.txt"));
		FileInputFormat.addInputPath(job,new Path("/tongji/join/user_info.txt"));
		Path outPath = new Path("/bd14/output/ReduceJoin");
		// Delete any previous run's output so the job doesn't fail on an
		// already-existing output directory.
		outPath.getFileSystem(conf).delete(outPath,true);
		FileOutputFormat.setOutputPath(job,outPath );
		System.exit(job.waitForCompletion(true)?0:1);
	}
}
