package data_join;


import Utils.FileUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/*
raw data are stored in two separate files:
file 1 name:
zhangsan 1
lisi 1
wangwu 2
zhaoliu 2
huangqi 1
liuba 1
zhangmanyu 3
wuyanzu 2
liuyifei 2
fanbingbing 1
libingbing 3
wuqilong 2
pengyuyan 3
zhangbaizhi 3
lisisi 3
zhujun 4
huangzongze 4
madong 4
jialing 3
nigemaiti 2

file 2 city:
1 Beijing
2 Shanghai
3 Guangzhou
4 Tokyo

purpose: join name and city with its unique link key
 */
/**
 * Map-side (replicated) join of the "name" table against the small "city" table.
 *
 * <p>The small city table is shipped to every mapper via the distributed cache and
 * loaded into an in-memory map in {@link doMapper#setup}; each name record is then
 * joined in the map phase, so no shuffle/reduce is required. Output lines have the
 * form {@code name<TAB>city<TAB>cityId}.
 */
public class MapJoin {

    public static String inputCityPath = "hdfs://localhost:9000/join/join_city";
    public static String inputNamePath = "hdfs://localhost:9000/join/join_name";
    public static String outputPath = "hdfs://localhost:9000/join/output_namecity";

    public static void main(String[] args)
            throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        FileUtil fileUtil = new FileUtil(conf, outputPath);
        fileUtil.doError(inputCityPath);
        fileUtil.doError(inputNamePath);
        fileUtil.doDelete(outputPath);

        Job job = Job.getInstance(conf, "map-end join");
        job.setJarByClass(MapJoin.class);

        job.setMapperClass(doMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);
        // The join happens entirely on the map side, so skip the (pointless)
        // sort/shuffle/reduce phase; mapper output is written directly.
        job.setNumReduceTasks(0);

        // Ship the small "city" table to every mapper via the distributed cache.
        job.addCacheFile(URI.create(inputCityPath));
        FileInputFormat.setInputPaths(job, new Path(inputNamePath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /*
    basic methodology: cache the smaller table in memory;
    cached files are exposed through the task's context object
     */
    public static class doMapper extends Mapper<LongWritable, Text, Text, NullWritable> {

        // In-memory copy of the small table: city id -> city name.
        private Map<String, String> cityById = new HashMap<>();
        // Reused output key to avoid allocating a Text per record.
        private Text text = new Text();

        /**
         * Loads the cached city table into {@link #cityById} before any records are mapped.
         *
         * @throws IOException if the cache file is missing or cannot be read
         */
        @Override
        protected void setup(Mapper<LongWritable, Text, Text, NullWritable>.Context context)
                throws IOException, InterruptedException {
            URI[] cacheFiles = context.getCacheFiles();
            if (cacheFiles == null || cacheFiles.length == 0) {
                throw new IOException("city table was not registered as a cache file");
            }
            URI cacheUri = cacheFiles[0];

            // Resolve the file system from the cache file's own URI. (The previous
            // version resolved it from outputPath, which only worked because both
            // happened to live on the same HDFS instance.)
            FileSystem fs = FileSystem.get(cacheUri, context.getConfiguration());
            // try-with-resources guarantees the stream is closed even if a read fails;
            // UTF-8 is pinned explicitly rather than relying on the platform charset.
            try (BufferedReader bufferedReader = new BufferedReader(
                    new InputStreamReader(fs.open(new Path(cacheUri.getPath())), StandardCharsets.UTF_8))) {
                String data;
                while ((data = bufferedReader.readLine()) != null) {
                    // fields[0] - id, fields[1] - city; \s+ tolerates tabs/multiple spaces
                    String[] fields = data.trim().split("\\s+");
                    if (fields.length < 2) {
                        continue; // skip blank or malformed lines instead of crashing
                    }
                    cityById.put(fields[0], fields[1]);
                }
            }
        }

        /**
         * Joins one name record against the cached city table and emits
         * {@code name<TAB>city<TAB>cityId}. Records whose city id has no match
         * are dropped (inner-join semantics) instead of emitting the string "null".
         */
        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, NullWritable>.Context context)
                throws IOException, InterruptedException {
            // splits[0] - name, splits[1] - id
            String[] splits = value.toString().trim().split("\\s+");
            if (splits.length < 2) {
                return; // blank or malformed input line
            }
            String city = cityById.get(splits[1]);
            if (city == null) {
                return; // no matching city id in the small table
            }
            // Emit the joined row as a single tab-delimited string rather than a
            // Java data structure, so downstream text tools can consume it directly.
            text.set(splits[0] + '\t' + city + '\t' + splits[1]);
            context.write(text, NullWritable.get());
        }
    }
}
