package chen.bupt.mapreduce.usermodel;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import chen.bupt.constant.Constants;
import chen.bupt.text.TextUtils;
import chen.bupt.util.HDFSFileUtils;

public class UserModelExtractJob {

	// HDFS paths, all sourced from project-wide constants:
	private static String inputPath = Constants.TFIDF_PATH; // TF-IDF document lines produced upstream
	private static String Tmp1 = Constants.TMP1; // intermediate output of job 1 (deleted after job 2)
	private static String outputPath = Constants.USER_FEATURE_PATH; // final per-author feature file

	public static class Map1 extends Mapper<LongWritable, Text, Text, Text> {
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			if (!HDFSFileUtils.isInputFile(context))
				return;
			int ind = value.toString().indexOf("|");
			String[] docInfo = value.toString().substring(0, ind).split(";");
			String bid = docInfo[1];
			String id = bid + "_" + docInfo[2];
			String reid = bid + "_" + docInfo[3];
			String firstid = bid + "_" + docInfo[4];
			String author = docInfo[5];
			String cont = value.toString().substring(ind + 1);
			context.write(new Text(id), new Text("0|" + author + "|" + cont));
			if (!id.equals(reid)) {
				context.write(new Text(reid), new Text("1|" + author));
			}
			if (!(id.equals(firstid) || "0".equals(firstid) || reid
					.equals(firstid))) {
				context.write(new Text(firstid), new Text("2|" + author));
			}
		}
	}

	public static class Reduce1 extends Reducer<Text, Text, Text, NullWritable> {
		public void reduce(Text key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			String cont = null;
			List<String> authors = new ArrayList<String>();
			for (Text value : values) {
				String tmp = value.toString();
				if (tmp.startsWith("0")) {
					String[] s = TextUtils.splitTextToCont(tmp, "|", 2);
					cont = s[1];
					authors.add(s[0].split("\\|")[1]);
				} else {
					authors.add(tmp.split("\\|")[1]);
				}
			}
			if (cont == null) {
				return;
			} else {
				for (String author : authors) {
					context.write(new Text(author + "|" + cont), NullWritable
							.get());
				}
			}
		}
	}

	public static class Map2 extends Mapper<LongWritable, Text, Text, Text> {
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			if (!HDFSFileUtils.isInputFile(context))
				return;
			String s = value.toString();
			int index = s.indexOf("|");
			context.write(new Text(s.substring(0, index)), new Text(s
					.substring(index + 1)));
		}
	}

	public static class Reduce2 extends Reducer<Text, Text, Text, NullWritable> {
		public void reduce(Text key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			int count = 0;
			Map<String, Double> map = new HashMap<String, Double>();
			for (Text value : values) {
				count++;
				String[] terms = value.toString().split("\\|");
				for (int i = 1; i < terms.length; i++) {
					String[] tmp = terms[i].split(";");
					if (tmp.length != 2)
						continue;
					if (map.containsKey(tmp[0])) {
						map.put(tmp[0], map.get(tmp[0])
								+ Double.parseDouble(tmp[1]));
					} else {
						map.put(tmp[0], Double.parseDouble(tmp[1]));
					}
				}
			}
			StringBuffer sb = new StringBuffer();
			sb.append(key.toString());
			//TODO 需要修改分数公式达到更好的效果 
			for (String mapkey : map.keySet()) {
				sb.append("|" + mapkey + ";" + map.get(mapkey) / count);
			}
			context.write(new Text(sb.toString()), NullWritable.get());
		}
	}

	/**
	 * Chains the two jobs: job 1 joins authors with document content into the
	 * intermediate path, job 2 averages per-author term weights into the user
	 * feature file, then the intermediate path is removed.
	 *
	 * @param args unused; all paths come from {@link Constants}
	 * @throws Exception if HDFS access or job submission fails
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf1 = new Configuration();
		// Disable the task timeout: long-running reduces over large author
		// groups must not be killed.
		conf1.set("mapred.task.timeout", "0");
		Path input = new Path(inputPath);
		Path tmp = new Path(Tmp1);
		Path output = new Path(outputPath);
		HDFSFileUtils.deleteFile(tmp, conf1);
		HDFSFileUtils.deleteFile(output, conf1);

		Job job1 = new Job(conf1, "usermodel1");
		job1.setJarByClass(UserModelExtractJob.class);
		job1.setMapperClass(Map1.class);
		job1.setReducerClass(Reduce1.class);
		job1.setOutputKeyClass(Text.class);
		job1.setOutputValueClass(Text.class);
		FileInputFormat.addInputPath(job1, input);
		FileOutputFormat.setOutputPath(job1, tmp);
		// Previously the result was ignored, so job 2 ran against a missing or
		// partial intermediate even when job 1 failed. Abort instead.
		if (!job1.waitForCompletion(true)) {
			System.exit(1);
		}

		Configuration conf2 = new Configuration();
		conf2.set("mapred.task.timeout", "0");

		Job job2 = new Job(conf2, "usermodel2");
		job2.setJarByClass(UserModelExtractJob.class);
		job2.setMapperClass(Map2.class);
		job2.setReducerClass(Reduce2.class);
		job2.setOutputKeyClass(Text.class);
		job2.setOutputValueClass(Text.class);
		FileInputFormat.addInputPath(job2, tmp);
		FileOutputFormat.setOutputPath(job2, output);
		boolean ok = job2.waitForCompletion(true);
		// Clean up the intermediate output regardless of job 2's outcome.
		HDFSFileUtils.deleteFile(tmp, conf2);
		System.exit(ok ? 0 : 1);
	}

}