package com.shujia.MR;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Hashtable;

public class MapReduce01 {
    // Map side
    public static class MyMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
        // Map-side join lookup table: currency ID -> "rate#currencyName".
        // (Hashtable kept from the original; a plain HashMap would also do
        // since each map task has its own instance.)
        Hashtable<String, String> ht = new Hashtable<>();

        /**
         * Runs once per map task: loads the small exchange-rate table
         * ("huilv" = exchange rate) from HDFS into {@link #ht}.
         *
         * @throws IOException if the rate file cannot be opened or read
         */
        @Override
        protected void setup(Mapper<LongWritable, Text, Text, NullWritable>.Context context) throws IOException {
            FileSystem fs = FileSystem.get(context.getConfiguration());
            Path rateFile = new Path("/data/MR/input2/huilv.csv");
            // try-with-resources: the original leaked the reader/stream.
            // Explicit UTF-8 instead of the platform default charset.
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(fs.open(rateFile), StandardCharsets.UTF_8))) {
                String line;
                while ((line = br.readLine()) != null) {
                    // Key by currency ID (column 0); value is "rate#name".
                    String[] splits = line.split(",");
                    if (splits.length < 3) {
                        continue; // skip malformed rate lines instead of throwing
                    }
                    ht.put(splits[0], splits[1] + "#" + splits[2]);
                }
            }
        }

        /**
         * Runs once per input record (one Foreign_Government_Loans CSV line):
         * joins it with the exchange-rate table, converts the monetary fields
         * to RMB, computes the outstanding balance, and emits one enriched
         * CSV line as the key (value is NullWritable).
         */
        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, NullWritable>.Context context) throws IOException, InterruptedException {
            // Fields: project no, industry, project category, currency ID,
            // loan amount, actual drawdown, principal repaid, interest.
            String[] splits = value.toString().split(",");
            int no = Integer.parseInt(splits[0]);
            String hangye = splits[1];                    // industry
            String xmlb = splits[2];                      // project category
            String id = splits[3];                        // currency ID
            double dkje = Double.parseDouble(splits[4]);  // loan amount
            double sjtke = Double.parseDouble(splits[5]); // actual drawdown
            double yhbje = Double.parseDouble(splits[6]); // principal repaid
            double lxje = Double.parseDouble(splits[7]);  // interest

            // 1. Look up exchange rate and currency name. Records with an
            //    unknown currency ID are skipped: the original default "无"
            //    would have thrown NumberFormatException on parseDouble below.
            String hlAndName = ht.get(id);
            if (hlAndName == null) {
                return;
            }
            String[] rateParts = hlAndName.split("#");
            double hl = Double.parseDouble(rateParts[0]); // exchange rate
            String name = rateParts[1];                   // currency name

            // 2. Convert each amount to RMB.
            double dkje_rmb = dkje * hl;
            double sjtke_rmb = sjtke * hl;
            double yhbje_rmb = yhbje * hl;
            double lxje_rmb = lxje * hl;

            // 3. Outstanding balance = actual drawdown - principal repaid.
            double dkye = sjtke - yhbje;
            double dkye_rmb = dkye * hl;

            String res = no + "," + hangye + "," + xmlb + "," + id + "," + name + "," + dkje + "," + dkje_rmb + "," + sjtke + "," + sjtke_rmb + "," + yhbje + "," + yhbje_rmb + "," + lxje + "," + lxje_rmb + "," + dkye + "," + dkye_rmb;

            context.write(new Text(res), NullWritable.get());
        }
    }

    // Reduce side: intentionally absent — no reducer class is configured.

    // Custom FileOutputFormat
    // Custom FileOutputFormat that writes every record to one hard-coded HDFS
    // file instead of the framework-managed part-* files.
    // NOTE(review): because the path is fixed, multiple concurrent tasks would
    // clobber each other's file — only safe for a single-task job; confirm.
    public static class MyFileOutputFormat extends FileOutputFormat<Text, NullWritable> {

        @Override
        public RecordWriter<Text, NullWritable> getRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException {
            // newInstance() returns a non-cached FileSystem that WE must close
            // (the original leaked it) — done in close() below.
            final FileSystem fileSystem = FileSystem.newInstance(taskAttemptContext.getConfiguration());
            // Custom output path and file name.
            final FSDataOutputStream out = fileSystem.create(new Path("/data/MR/output1/Foreign_chang"));
            return new RecordWriter<Text, NullWritable>() {

                @Override
                public void write(Text key, NullWritable value) throws IOException {
                    String sep = " "; // key/value separator
                    // Write "key value\n"; explicit UTF-8, and the simpler
                    // write(byte[]) overload (full array, same bytes as before).
                    out.write(key.toString().getBytes(StandardCharsets.UTF_8));
                    out.write(sep.getBytes(StandardCharsets.UTF_8));
                    // NullWritable.toString() output is preserved byte-for-byte
                    // so the file format matches the original implementation.
                    out.write(value.toString().getBytes(StandardCharsets.UTF_8));
                    out.write("\n".getBytes(StandardCharsets.UTF_8));
                    // Per-record flush kept from the original (costly, but
                    // removing it would change failure-time visibility).
                    out.flush();
                }

                @Override
                public void close(TaskAttemptContext arg0) throws IOException {
                    // Close the stream AND the FileSystem instance created
                    // above; the finally block runs even if out.close() throws.
                    try {
                        out.close();
                    } finally {
                        fileSystem.close();
                    }
                }
            };
        }
    }


    // Driver side
    /**
     * Driver: configures and submits the map-only join job.
     * Input/output paths are hard-coded HDFS locations.
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();

        Job job = Job.getInstance(conf);
        job.setJobName("MapReduce01");
        job.setJarByClass(MapReduce01.class);

        // Map side
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);
        // No reducer is configured: make the job explicitly map-only so the
        // framework skips the needless shuffle/sort and identity-reduce phase.
        job.setNumReduceTasks(0);

        // Input path (the small rate table in input2 is read directly by
        // MyMapper.setup(), not via the job input).
        FileInputFormat.addInputPath(job, new Path("/data/MR/input1/"));
        // Custom output format (writes a single fixed-name file).
        job.setOutputFormatClass(MyFileOutputFormat.class);

        // Delete a pre-existing output path so the job can be re-run.
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/data/MR/output1");
        if (fs.exists(path)) {
            fs.delete(path, true);
        }

        FileOutputFormat.setOutputPath(job, path);

        // Propagate success/failure via the process exit code — the original
        // ignored the boolean, so a failed job still exited with status 0.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
    // hadoop jar Competition2021-1.0.jar com.shujia.MR.MapReduce01
}
