package RegionAveMoney;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import java.io.IOException;

public class regionAveMoneyDriver {

    public static class Map extends Mapper<Object , Text , Text , FloatWritable>{
        private static Text newKey=new Text();
        public void map(Object key,Text value,Context context) throws IOException, InterruptedException{
            //将输入的纯文本文件数据转化成string
            String[] splits = value.toString().split(",");
            String newKey=splits[16];
            //String a=String.valueOf('A');
            float click=Float.parseFloat(splits[10]);
            //将数据和值输入到reduce处理
            //if (newKey.equals(a))
            context.write(new Text(newKey), new FloatWritable(click));

        }
    }
    public static class Reduce extends Reducer<Text, FloatWritable, Text, FloatWritable>{
        public void reduce(Text key,Iterable<FloatWritable> values,Context context) throws IOException, InterruptedException{
            float num=0;
            int count=0;
            for(FloatWritable val:values){
                //每个元素求和num
                num+=val.get();
                //统计元素的次数count
                count++;
            }
            //统计次数
            float avg=num/count;
            context.write(key,new FloatWritable(avg));
        }
    }
    /**
     * Configures and submits the "MyAverage" MapReduce job.
     * Reads train.csv from a hard-coded local path and writes the
     * per-region averages to output3; exits non-zero on job failure.
     *
     * @param args unused — input/output paths are hard-coded below
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        System.out.println("start");
        // Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
        Job job = Job.getInstance(conf, "MyAverage");
        job.setJarByClass(regionAveMoneyDriver.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FloatWritable.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        // Hard-coded local paths; the output directory must not already exist.
        Path out = new Path("E:\\hadoop_develop\\output3");
        FileInputFormat.setInputPaths(job, "E:\\hadoop_develop\\train.csv");
        FileOutputFormat.setOutputPath(job, out);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}