package kmeans.mapreduce;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import kmeans.bean.WXKmeansData;
import kmeans.exception.WXKmeansException;

import java.io.IOException;
import java.text.DecimalFormat;

public class WXKmeansReduce extends Reducer<IntWritable, WXKmeansData, IntWritable, Text> {
    /** Number of coordinates per data point; read from the "dimension" job property in setup(). */
    private int dimension;

    /**
     * Formats each centroid coordinate with two decimal places. Created once per task
     * instead of once per loop iteration; safe because a reducer task is single-threaded.
     * NOTE(review): DecimalFormat is locale-sensitive (decimal separator) — same as the
     * original behavior; confirm downstream parsing expects the task JVM's default locale.
     */
    private final DecimalFormat coordinateFormat = new DecimalFormat("0.00");

    /**
     * Reads the point dimension from the job configuration.
     *
     * @throws NumberFormatException if the "dimension" property is missing or not an integer
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        Configuration conf = context.getConfiguration();
        // parseInt avoids the needless boxing of Integer.valueOf; behavior is identical.
        dimension = Integer.parseInt(conf.get("dimension"));
        System.out.println("reduce过程初始化成功" + conf.get("dimension"));
    }

    /**
     * Computes the updated centroid for cluster {@code key}: accumulates every assigned
     * point via {@link WXKmeansData#add}, then divides each coordinate sum by the total
     * point count and emits the result as a space-separated, two-decimal string.
     */
    @Override
    protected void reduce(IntWritable key, Iterable<WXKmeansData> values, Context context)
            throws IOException, InterruptedException {
        System.out.println("reduce 进行中 " + key.get());
        // BUG FIX: the accumulator was hard-coded to "0 0 0" (exactly three dimensions),
        // silently breaking any job configured with dimension != 3. Build a zero vector
        // of the configured length instead.
        WXKmeansData newCentroid =
                new WXKmeansData(new Text(zeroVector(dimension)), new IntWritable(0));
        System.out.println(newCentroid.getkMeansText().toString());
        for (WXKmeansData point : values) {
            try {
                newCentroid = WXKmeansData.add(newCentroid, point, dimension);
            } catch (WXKmeansException e) {
                // NOTE(review): a failed add silently drops this point from the centroid
                // (original behavior, kept). Consider rethrowing as IOException so the
                // task fails loudly instead of producing a skewed centroid.
                e.printStackTrace();
            }
        }
        String[] coordinateSums = newCentroid.getkMeansText().toString().trim().split(" ");
        int dataSize = newCentroid.getDataSize().get();
        // Guard: dividing by zero would format Infinity into the output; emit nothing
        // for a cluster that accumulated no points.
        if (dataSize == 0) {
            return;
        }
        StringBuilder result = new StringBuilder();
        for (String sum : coordinateSums) {
            result.append(coordinateFormat.format(Double.parseDouble(sum) / dataSize)).append(" ");
        }
        context.write(key, new Text(result.toString().trim()));
    }

    /** Builds a space-separated zero vector of the given length, e.g. dim=3 -> "0 0 0". */
    private static String zeroVector(int dim) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < dim; i++) {
            if (i > 0) {
                sb.append(' ');
            }
            sb.append('0');
        }
        return sb.toString();
    }

    /** No per-task teardown beyond the framework default. */
    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        super.cleanup(context);
    }
}
