package com.mango.ch10;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import com.mango.HDFSTools.HDFSAPI;

public class FilmRecommondJob extends Configured implements Tool {
	static Path inputPath = new Path("/FilmRecommondJob/input");// input

	/**
	 * Stage-1 mapper: emits the movie as key and (user, rating) as value so
	 * the stage-1 reducer can count the total number of raters per movie.
	 *
	 * Input: one text line "user movie rating" (space-separated).
	 * Output: (movie, (user, rating)).
	 *
	 * @author Mango
	 *
	 */
	static class Mapper_s1 extends Mapper<LongWritable, Text, Text, S2Pair> {

		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, S2Pair>.Context context)
				throws IOException, InterruptedException {
			String[] tokens = value.toString().split(" ");
			// Skip malformed/short lines instead of failing the whole task
			// with an ArrayIndexOutOfBoundsException.
			if (tokens.length < 3) {
				return;
			}
			String user = tokens[0];
			String movie = tokens[1];
			String pf = tokens[2];
			System.out.println("s1 map output:" + movie + " " + user + " " + pf);
			context.write(new Text(movie), new S2Pair(user, pf));// emit (movie, (user, pf))
		}

	}

	/**
	 * Stage-1 reducer: counts how many users rated each movie, then re-emits
	 * every rating annotated with that count.
	 *
	 * Input: (movie, (user, pf)).
	 * Output: (user, (movie, pf, sumOfRate)).
	 *
	 * @author Mango
	 *
	 */
	static class Reducer_s1 extends Reducer<Text, S2Pair, Text, C3Pair> {

		@Override
		protected void reduce(Text movie, Iterable<S2Pair> values, Context context)
				throws IOException, InterruptedException {
			// First pass: count the raters while buffering the pairs. A deep
			// copy is taken because Hadoop reuses the same value instance
			// across iterations of a reduce group.
			int sumOfRate = 0;
			List<S2Pair> list = new ArrayList<>();
			for (S2Pair t : values) {
				sumOfRate += 1;
				System.out.println("init value in s1.reduce :" + t);
				S2Pair s2 = new S2Pair();
				s2.set_1(t.get_1());
				s2.set_2(t.get_2());
				list.add(s2);
			}
			// Second pass: emit (user, (movie, pf, sumOfRate)) now that the
			// total rater count of this movie is known.
			for (S2Pair t : list) {
				String userKey = t.get_1();
				int pf = Integer.parseInt(t.get_2());
				C3Pair cp3 = new C3Pair(movie.toString(), pf, sumOfRate);
				System.out.println("s1 reduce output:" + userKey + " " + cp3);
				context.write(new Text(userKey), cp3);
			}

		}
	}

	/**
	 * Stage-2 mapper: re-reads the stage-1 output from HDFS and forwards it
	 * unchanged, keyed by user.
	 *
	 * Input: text line "user \t movie \t pf \t sumOfRate".
	 * Output: (user, (movie, pf, sumOfRate)).
	 *
	 * @author Mango
	 *
	 */
	static class Mapper_s2 extends Mapper<LongWritable, Text, Text, C3Pair> {

		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, C3Pair>.Context context)
				throws IOException, InterruptedException {
			String[] tokens = value.toString().split("\t");
			// Skip malformed/short lines instead of failing the whole task.
			if (tokens.length < 4) {
				return;
			}
			Text userKey = new Text(tokens[0]);
			String movie = tokens[1];
			int pf = Integer.parseInt(tokens[2]);
			int _sumOfRate = Integer.parseInt(tokens[3]);
			C3Pair tup = new C3Pair(movie, pf, _sumOfRate);
			System.out.println("s2 map输出:" + userKey.toString() + " " + movie + " " + pf + " " + _sumOfRate);
			context.write(userKey, tup);// emit (user, (movie, pf, sumOfRate))
		}

	}

	/**
	 * Stage-2 reducer: collects every movie rated by one user, forms all
	 * unordered movie pairs and emits the per-pair statistics the similarity
	 * formulas need.
	 *
	 * Input: (user, (movie, pf, sumOfRate)).
	 * Output: ((m1, m2), (m1.pf, m1.sumOfRate, m2.pf, m2.sumOfRate, m1*m2, m1*m1, m2*m2)).
	 *
	 * @author Mango
	 *
	 */
	static class Reducer_s2 extends Reducer<Text, C3Pair, S2Pair, I7Pair> {

		@Override
		protected void reduce(Text userkey, Iterable<C3Pair> values, Context context)
				throws IOException, InterruptedException {
			for (List<C3Pair> combo : generateUniqueCombinations(values)) {
				C3Pair first = combo.get(0);
				C3Pair second = combo.get(1);
				System.out.println("生成的值对是：" + first.get_1() + "-" + second.get_1());
				// The next stage is keyed by the (name-ordered) movie pair.
				S2Pair reduceKey = new S2Pair(first.get_1(), second.get_1());
				// Statistics carried alongside the pair: the two ratings, the
				// two rater counts, their product and both squares.
				int rating1 = first.get_2();
				int rating2 = second.get_2();
				I7Pair value = new I7Pair(rating1, first.get_3(), rating2, second.get_3(),
						rating1 * rating2, rating1 * rating1, rating2 * rating2);
				context.write(reduceKey, value);
			}
		}
	}

	/**
	 * Stage-3 mapper: parses the stage-2 reducer output back into
	 * ((m1, m2), I7Pair) records for the final similarity computation.
	 *
	 * Input: text line "m1 \t m2 \t pf1 \t sumOfRate1 \t pf2 \t sumOfRate2 \t m1*m2 \t m1*m1 \t m2*m2".
	 * Output: ((m1, m2), (pf1, sumOfRate1, pf2, sumOfRate2, m1*m2, m1*m1, m2*m2)).
	 *
	 * @author Mango
	 *
	 */
	static class Mapper_s3 extends Mapper<LongWritable, Text, S2Pair, I7Pair> {

		@Override
		protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			String[] tokens = value.toString().split("\t");
			// Skip malformed/short lines instead of failing the whole task.
			if (tokens.length < 9) {
				return;
			}
			S2Pair pairKey = new S2Pair(tokens[0], tokens[1]);
			int pf1 = Integer.parseInt(tokens[2]);
			int sumOfRate1 = Integer.parseInt(tokens[3]);
			int pf2 = Integer.parseInt(tokens[4]);
			int sumOfRate2 = Integer.parseInt(tokens[5]);
			int ratingProduct = Integer.parseInt(tokens[6]);
			int rating1Squared = Integer.parseInt(tokens[7]);
			int rating2Squared = Integer.parseInt(tokens[8]);
			I7Pair i7p = new I7Pair(pf1, sumOfRate1, pf2, sumOfRate2, ratingProduct, rating1Squared, rating2Squared);
			context.write(pairKey, i7p);
		}

	}

	/**
	 * Stage-3 reducer: aggregates the per-(user-overlap) statistics of one
	 * movie pair and computes three similarity measures: Pearson correlation,
	 * cosine similarity, and Jaccard similarity.
	 *
	 * Input: ((m1, m2), (m1.pf, m1.sumOfRate, m2.pf, m2.sumOfRate, m1*m2, m1*m1, m2*m2)).
	 * Output: ((m1, m2), "pearson \t cosine \t jaccard") as text.
	 *
	 * @author Mango
	 *
	 */
	static class Reducer_s3 extends Reducer<S2Pair, I7Pair, S2Pair, Text> {

		@Override
		protected void reduce(S2Pair key, Iterable<I7Pair> values, Context context)
				throws IOException, InterruptedException {
			// Accumulators over all records of this movie pair; field order
			// follows the I7Pair layout produced by Reducer_s2 / Mapper_s3.
			int pf1Sum = 0;                 // sum of movie-1 ratings
			int pf2Sum = 0;                 // sum of movie-2 ratings
			int dotProduct = 0;             // sum of rating1 * rating2
			int rating1NormSquared = 0;     // sum of rating1^2
			int rating2NormSquared = 0;     // sum of rating2^2
			int maxNumOfumRaterS1 = 0;      // total rater count of movie 1 (max over records)
			int maxNumOfumRaterS2 = 0;      // total rater count of movie 2 (max over records)
			int groupSize = 0;              // number of users who rated both movies
			for (I7Pair t : values) {
				groupSize += 1;
				dotProduct += t.get_5();
				pf1Sum += t.get_1();
				pf2Sum += t.get_3();
				rating1NormSquared += t.get_6();
				rating2NormSquared += t.get_7();
				// The sumOfRate fields repeat per record; keep the maximum.
				if (t.get_2() > maxNumOfumRaterS1)
					maxNumOfumRaterS1 = t.get_2();
				if (t.get_4() > maxNumOfumRaterS2)
					maxNumOfumRaterS2 = t.get_4();
			}
			System.out.println("in s3 reduce key:" + key.toString() + "  size:" + groupSize);
			double pearson = calPearsonCorrelation(groupSize, dotProduct, pf1Sum, pf2Sum, rating1NormSquared,
					rating2NormSquared);
			double cosine = calCosineCooelation(dotProduct, rating1NormSquared, rating2NormSquared);
			double jaccard = calJaccardCorrelation(groupSize, maxNumOfumRaterS1, maxNumOfumRaterS2);
			// Emit all three similarity scores, tab-separated.
			context.write(key, new Text(pearson + "\t" + cosine + "\t" + jaccard));
		}
	}

	/**
	 * Entry point: delegates to {@link ToolRunner} so generic Hadoop options
	 * are parsed, then exits with the pipeline's return code.
	 */
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new Configuration(), new FilmRecommondJob(), args));
	}

	/**
	 * Materializes the iterable and returns every unordered pair of its
	 * elements, each pair name-ordered by {@link #makePair}.
	 *
	 * @param values the C3Pair values of one reduce group
	 * @return a list of two-element lists, one per unique i&lt;j combination
	 */
	public static List<List<C3Pair>> generateUniqueCombinations(Iterable<C3Pair> values) {
		List<List<C3Pair>> list = new ArrayList<>();
		List<C3Pair> tmpList = new ArrayList<>();
		// Deep-copy the values into a list: Hadoop reuses the same value
		// instance while iterating a reduce group.
		for (C3Pair c3 : values) {
			System.out.println("in values" + c3);
			C3Pair tc = new C3Pair();
			tc.set_1(c3.get_1());
			tc.set_2(c3.get_2());
			tc.set_3(c3.get_3());
			tmpList.add(tc);
		}
		System.out.println("将value 转存入list中" + tmpList);
		// Every i<j combination is unique by construction, so no duplicate
		// check is required.
		for (int i = 0; i < tmpList.size(); i++) {
			for (int j = i + 1; j < tmpList.size(); j++) {
				list.add(makePair(tmpList.get(i), tmpList.get(j)));
			}
		}
		return list;
	}

	/**
	 * Wraps two C3Pairs in a two-element list ordered by their movie name
	 * (lexicographically smaller first), so the same pair always yields the
	 * same key regardless of input order.
	 */
	private static List<C3Pair> makePair(C3Pair c1, C3Pair c2) {
		boolean firstIsSmaller = c1.get_1().compareTo(c2.get_1()) < 0;
		List<C3Pair> ordered = new ArrayList<>();
		ordered.add(firstIsSmaller ? c1 : c2);
		ordered.add(firstIsSmaller ? c2 : c1);
		return ordered;
	}

	/**
	 * Configures the three chained MapReduce jobs (s1 -> s2 -> s3), runs them
	 * through {@link JobControl} and waits for the whole pipeline to finish.
	 *
	 * @return 0 when every job succeeded, 1 when at least one job failed
	 */
	@Override
	public int run(String[] args) throws Exception {
		Configuration conf = getConf();
		Path step1_input = inputPath;
		Path step1_output = new Path("/FilmRecommondJob/output/s1");
		Path step2_output = new Path("/FilmRecommondJob/output/s2");
		Path step3_output = new Path("/FilmRecommondJob/output/s3");
		Path[] paths = { step1_output, step2_output, step3_output };
		// Ensure the input directory exists and the output directories do not.
		checkPath(paths);

		// Stage 1: count raters per movie.
		Job job1 = Job.getInstance(conf);
		job1.setNumReduceTasks(3);
		job1.setJarByClass(FilmRecommondJob.class);
		job1.setMapperClass(FilmRecommondJob.Mapper_s1.class);
		job1.setReducerClass(FilmRecommondJob.Reducer_s1.class);
		job1.setMapOutputKeyClass(Text.class);
		job1.setMapOutputValueClass(S2Pair.class);
		job1.setOutputFormatClass(TextOutputFormat.class);
		job1.setOutputKeyClass(Text.class);
		job1.setOutputValueClass(C3Pair.class);
		FileInputFormat.addInputPath(job1, step1_input);
		FileOutputFormat.setOutputPath(job1, step1_output);

		// Stage 2: build per-user movie pairs with their statistics.
		Job job2 = Job.getInstance(conf);
		job2.setNumReduceTasks(5);
		FileInputFormat.addInputPath(job2, step1_output);
		FileOutputFormat.setOutputPath(job2, step2_output);
		job2.setJarByClass(FilmRecommondJob.class);
		job2.setMapperClass(FilmRecommondJob.Mapper_s2.class);
		job2.setReducerClass(FilmRecommondJob.Reducer_s2.class);
		job2.setMapOutputKeyClass(Text.class);
		job2.setMapOutputValueClass(C3Pair.class);
		job2.setOutputKeyClass(S2Pair.class);
		job2.setOutputValueClass(I7Pair.class);

		// Stage 3: compute the similarity measures per movie pair.
		Job job3 = Job.getInstance(conf);
		job3.setNumReduceTasks(3);
		FileInputFormat.addInputPath(job3, step2_output);
		FileOutputFormat.setOutputPath(job3, step3_output);
		job3.setJarByClass(FilmRecommondJob.class);
		job3.setMapOutputKeyClass(S2Pair.class);
		job3.setMapOutputValueClass(I7Pair.class);
		job3.setOutputKeyClass(S2Pair.class);
		job3.setOutputValueClass(Text.class);
		job3.setPartitionerClass(S3Patitioner.class);
		job3.setMapperClass(FilmRecommondJob.Mapper_s3.class);
		job3.setReducerClass(FilmRecommondJob.Reducer_s3.class);

		// Chain the jobs: job1 -> job2 -> job3.
		ControlledJob cj1 = new ControlledJob(conf);
		ControlledJob cj2 = new ControlledJob(conf);
		ControlledJob cj3 = new ControlledJob(conf);
		cj1.setJob(job1);
		cj2.setJob(job2);
		cj3.setJob(job3);
		cj2.addDependingJob(cj1);
		cj3.addDependingJob(cj2);
		// Create the group controller and run it on a background thread.
		JobControl jc = new JobControl(this.getClass().getName());
		jc.addJob(cj1);
		jc.addJob(cj2);
		jc.addJob(cj3);
		new Thread(jc).start();
		// Poll until the chain is done; sleep between polls so idle scheduler
		// gaps do not turn into a busy-wait burning a whole CPU core.
		while (!jc.allFinished()) {
			for (ControlledJob cj : jc.getRunningJobList()) {
				cj.getJob().monitorAndPrintJob();
			}
			Thread.sleep(500);
		}
		// Stop the JobControl thread so the JVM can exit cleanly, and report
		// failure via the exit code instead of always returning success.
		jc.stop();
		return jc.getFailedJobList().isEmpty() ? 0 : 1;
	}

	/**
	 * Prepares HDFS for a run: creates the input directory if missing and
	 * deletes any pre-existing output directories (Hadoop refuses to start a
	 * job whose output path already exists).
	 *
	 * @param outPaths the output directories to remove before running
	 */
	public static void checkPath(Path[] outPaths) {
		Configuration conf = new Configuration();
		try {
			HDFSAPI hdfs = new HDFSAPI(conf);
			hdfs.createDirectory(inputPath);
			// Remove stale output directories from previous runs.
			for (Path path : outPaths) {
				hdfs.delDirectory(path);
			}
			hdfs.closeFilseSystem();
		} catch (IOException e) {
			e.printStackTrace();
			System.out.println("----------文件操作失败");
		} catch (InterruptedException e) {
			// Restore the interrupt flag so callers can still observe it.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		} catch (URISyntaxException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Pearson correlation between the two rating vectors of a movie pair.
	 *
	 * @param size
	 *            number of co-raters (length of both vectors)
	 * @param dotProduct
	 *            sum of rating1 * rating2 over the co-raters
	 * @param rating1Sum
	 *            sum of movie-1 ratings
	 * @param rating2Sum
	 *            sum of movie-2 ratings
	 * @param rating1NormSq
	 *            sum of squared movie-1 ratings
	 * @param rating2NormSq
	 *            sum of squared movie-2 ratings
	 * @return the Pearson coefficient, or 0.0 when either vector has zero
	 *         variance (the correlation is undefined there; the original
	 *         formula produced NaN or infinity)
	 */
	static double calPearsonCorrelation(double size, double dotProduct, double rating1Sum, double rating2Sum,
			double rating1NormSq, double rating2NormSq) {
		double numerator = size * dotProduct - rating1Sum * rating2Sum;
		double denominator = Math.sqrt(size * rating1NormSq - rating1Sum * rating1Sum)
				* Math.sqrt(size * rating2NormSq - rating2Sum * rating2Sum);
		// Guard the zero-variance case so downstream text output never
		// contains NaN/Infinity.
		if (denominator == 0) {
			return 0;
		}
		return numerator / denominator;
	}

	/**
	 * Cosine similarity between the two rating vectors of a movie pair.
	 *
	 * The caller (Reducer_s3) passes the SQUARED norms (sum of squared
	 * ratings), so the square roots must be taken here; dividing by the raw
	 * products, as the original code did, is not the cosine formula and e.g.
	 * scored parallel vectors below 1.0.
	 *
	 * @param dotProdcut sum of rating1 * rating2 over the co-raters
	 * @param rating1Norm SQUARED norm of the movie-1 rating vector
	 * @param rating2Norm SQUARED norm of the movie-2 rating vector
	 * @return dot / (||v1|| * ||v2||), or 0.0 for a zero vector
	 */
	static double calCosineCooelation(double dotProdcut, double rating1Norm, double rating2Norm) {
		double denominator = Math.sqrt(rating1Norm) * Math.sqrt(rating2Norm);
		// Guard the zero-vector case so the output never contains NaN.
		if (denominator == 0) {
			return 0;
		}
		return dotProdcut / denominator;
	}

	/**
	 * Jaccard similarity: |A ∩ B| / |A ∪ B|, computed from the co-rater count
	 * and the two total rater counts.
	 *
	 * @param inCommon number of users who rated both movies
	 * @param totalA total rater count of movie A
	 * @param totalB total rater count of movie B
	 * @return the Jaccard coefficient, or 0.0 when the union is empty
	 */
	static double calJaccardCorrelation(double inCommon, double totalA, double totalB) {
		double union = totalA + totalB - inCommon;
		// Guard the empty-union case so the output never contains NaN.
		if (union == 0) {
			return 0;
		}
		return inCommon / union;
	}
}
