package yeliuhuishi.Join.MapJoin;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import yeliuhuishi.File.DelAllFile;
import yeliuhuishi.Join.Info;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;


/**
 * Map-side join implementation.
 * 1. Idea:
 * Suitable when one of the tables in the join is small.
 * The small table is distributed to every map task (loaded in setup()), and each mapper
 * joins it against the large-table records it reads locally, emitting the final result
 * directly — no reduce phase is needed.
 * 2. Pros and cons:
 * Greatly increases join parallelism and is fast; limited by the small table having to
 * fit in each mapper's memory.
 */
public class FileMapJoin extends Mapper<LongWritable, Text, Info, NullWritable> {

    // Small-table lookup built once per mapper in setup(): product id -> product Info.
    Map<String, Info> pdInforMap = new HashMap<String, Info>();

    /**
     * Driver: configures and submits a map-only job (0 reducers) that joins the
     * page-click log (large table) against the product info file (small table).
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        String input = "src/main/java/yeliuhuishi/Join/MapJoin/input/page_click_log.txt";
        String output = "src/main/java/yeliuhuishi/Join/MapJoin/output";
        // Clear the output directory first; Hadoop fails if it already exists.
        DelAllFile.delAllFile(new File(output));

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, FileMapJoin.class.getName());

        job.setJarByClass(FileMapJoin.class);
        job.setMapperClass(FileMapJoin.class);

        job.setMapOutputKeyClass(Info.class);
        job.setMapOutputValueClass(NullWritable.class);

        FileInputFormat.setInputPaths(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));

        // Map-side join: no shuffle/reduce phase at all.
        job.setNumReduceTasks(0);

        System.out.println(job.waitForCompletion(true) ? "成功" : "失败");
    }

    /**
     * Loads the small (product) table into {@link #pdInforMap} before any map() call.
     *
     * @throws IOException if the product file cannot be opened or read
     */
    @Override
    protected void setup(Context context) throws IOException {
        String cacheFile = "src/main/java/yeliuhuishi/Join/MapJoin/input/product_info.txt";
        FileSystem fileSystem = FileSystem.get(URI.create(cacheFile), context.getConfiguration());
        FSDataInputStream fdis = fileSystem.open(new Path(cacheFile));
        // try-with-resources closes the reader (and underlying stream) even if a line
        // fails to parse — the original leaked the stream on exception.
        try (BufferedReader br = new BufferedReader(new InputStreamReader(fdis))) {
            String line;
            while ((line = br.readLine()) != null) {
                // Expected CSV layout: pId,pName,price,produceArea — TODO confirm against file.
                String[] fields = line.split(",");
                // BUG FIX: allocate a fresh Info per record. The original reused one shared
                // instance, so every map entry pointed at the same object holding the values
                // of the LAST product line read, corrupting the join for all other products.
                Info info = new Info();
                info.setpId(fields[0]);
                info.setpName(fields[1]);
                info.setPrice(Float.parseFloat(fields[2]));
                info.setProduceArea(fields[3]);
                info.setFlag("1");
                pdInforMap.put(fields[0], info);
            }
        }
    }

    /**
     * Processes one click-log record (large table) and emits it joined with the
     * cached product info. Records referencing an unknown product id are skipped.
     * Expected CSV layout: uId,pId,dateStr,clickArea — TODO confirm against file.
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String[] split = value.toString().split(",");
        String pid = split[1];

        // Single lookup instead of three; guard against ids absent from the small table,
        // which previously threw NullPointerException and killed the task.
        Info product = pdInforMap.get(pid);
        if (product == null) {
            return;
        }

        Info bean = new Info();
        bean.setpId(pid);
        bean.setpName(product.getpName());
        bean.setPrice(product.getPrice());
        bean.setProduceArea(product.getProduceArea());
        bean.setuId(split[0]);
        bean.setDateStr(split[2]);
        bean.setClickArea(split[3]);
        context.write(bean, NullWritable.get());
    }
}
