package com.lkinga.www.DAO;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;

import com.lkinga.www.Model.JobInitModel;
import com.lkinga.www.Model.user;
import com.lkinga.www.Util.BaseDriver;
import com.lkinga.www.Util.HadoopUtil;



import org.apache.hadoop.io.IntWritable;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;


/**
 * Created by xiaohei on 16/3/9.

 */
public class userrecommend {

	/**
	 * For each input CSV line ("id,rating1,rating2,..."), keyed by the first
	 * field, emits two kinds of records:
	 *   "data:i,value" — every rating column i present on the line, and
	 *   "user:i,value" — the reference user's ratings for i = 1..10, read
	 *   from the {@code user} model class.
	 */
	private static class Job2Mapper extends Mapper<LongWritable, Text, Text, Text> {
		// Reused across map() calls to avoid per-record allocation.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] fields = value.toString().split(",");
			outKey.set(fields[0]);

			// Emit every rating column from this input row.
			for (int i = 1; i < fields.length; i++) {
				outValue.set("data:" + i + "," + fields[i]);
				context.write(outKey, outValue);
			}

			// Emit the reference user's ratings for items 1..10.
			// NOTE(review): the upper bound 10 is hard-coded — presumably the
			// number of rated items; confirm it matches user.get()'s range.
			for (int i = 1; i <= 10; i++) {
				outValue.set("user:" + i + "," + user.get(i));
				context.write(outKey, outValue);
			}
		}
	}
	/**
	 * Computes a recommendation score per key: pr = 1 / (1 + d), where d sums
	 * the squared rating differences for items rated (non-zero) by both this
	 * key's row and the reference user, plus the reference user's rating for
	 * items this key's row left unrated (zero).
	 */
	private static class Job2Reducer extends Reducer<Text, Text, Text, Text> {

		@Override
		public void reduce(Text key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			// itemId -> rating, split by record prefix: "data:" holds this
			// key's row, everything else ("user:") holds the reference user.
			Map<Integer, Integer> userRatings = new HashMap<Integer, Integer>();
			Map<Integer, Integer> dataRatings = new HashMap<Integer, Integer>();

			for (Text value : values) {
				String valueStr = value.toString();
				String[] kv = HadoopUtil.SPARATOR.split(valueStr.split(":")[1]);
				if (valueStr.startsWith("data")) {
					dataRatings.put(Integer.parseInt(kv[0]), Integer.valueOf(kv[1]));
				} else {
					userRatings.put(Integer.parseInt(kv[0]), Integer.valueOf(kv[1]));
				}
			}

			float pr = 0f; // accumulated distance before inversion
			for (Map.Entry<Integer, Integer> entry : dataRatings.entrySet()) {
				// Treat a missing reference rating as 0; the original unboxed
				// the raw get() result and threw NPE when the key was absent.
				Integer ref = userRatings.get(entry.getKey());
				int refRating = (ref == null) ? 0 : ref.intValue();
				int rating = entry.getValue();
				if (rating != 0 && refRating != 0) {
					pr += Math.pow(rating - refRating, 2);
				}
				if (rating == 0) {
					pr += refRating;
				}
			}
			// Invert: a larger distance yields a smaller recommendation score.
			pr = 1.0f / (1 + pr);

			context.write(key, new Text(pr + ""));
		}
	}
    	  public static void run() throws ClassNotFoundException, IOException, InterruptedException {
        Configuration conf = new Configuration();
        String inPath = HadoopUtil.HDFS + "/data/data/movie.csv";
        String outPath = HadoopUtil.HDFS + "/data/output/test";
        JobInitModel job = new JobInitModel(new String[]{inPath}, outPath, conf, null, "AdjacencyMatrix", userrecommend.class
                , null, Job2Mapper.class, Text.class, Text.class, null, null
                , Job2Reducer.class, Text.class, Text.class);
        BaseDriver.initJob(new JobInitModel[]{job});
     
    }
}
