package cn.com.coding.common.utils;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.chain.ChainMapper;
import org.apache.hadoop.mapreduce.lib.chain.ChainReducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;
import org.springframework.stereotype.Component;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Flight-ticket data-cleaning MapReduce job.
 *
 * <p>Input record layout (tab separated):
 * <ol>
 *   <li>column 1: day/time;</li>
 *   <li>column 2: seller;</li>
 *   <li>column 3: buyer;</li>
 *   <li>column 4: number of tickets;</li>
 *   <li>column 5: amount of money.</li>
 * </ol>
 * Sellers and buyers come in three roles: airports (names starting with {@code C}),
 * agents (starting with {@code O}) and ordinary passengers ({@code PAX}).
 *
 * <p>Cleaning requirements:
 * <ol>
 *   <li>Top-10 busiest airports (counting both the buy and the sell side);</li>
 *   <li>most popular routes (identical, or reversed, origin/destination pairs);</li>
 *   <li>Top-10 biggest agents;</li>
 *   <li>Top-10 airports by tickets sold on a given day.</li>
 * </ol>
 *
 * <p>Visualisation requirements:
 * <ol>
 *   <li>the four statistics above can be shown as pie charts, bar charts, etc.;</li>
 *   <li>a relation graph can show how strongly airports are connected,
 *       using ticket counts as the measure.</li>
 * </ol>
 *
 * <p>This class implements the "busiest airport" statistic; the other
 * statistics follow the same pattern.
 *
 * @author inke219223m
 */
@Component
public class ChainMapReduce {

    /** HDFS location of the raw ticket records. */
    private static final String INPUTPATH = "hdfs://8.130.17.56:8020/fly/region.txt";
    /** HDFS directory the cleaned output is written to (deleted and recreated on every run). */
    private static final String OUTPUTPATH = "hdfs://8.130.17.56:8020/fly/out1";

    public static void main(String[] args) throws IOException, URISyntaxException, InterruptedException {
        testCopyFromLocalFile();
        fixHadoop();
    }

    /**
     * Runs the cleaning job that totals the ticket count per airport.
     *
     * <p>Sample input (roles: airport {@code C...}, agent {@code O...}, passenger {@code PAX}):
     * <pre>
     * 2    C1      O1254   2   1400
     * 2    O5517   O4753   8   2500
     * 2    O3459   PAX     6   5500
     * 1    O700    O4133   1   2200
     * </pre>
     *
     * @return {@code true} when the job completed successfully, {@code false} on
     *         job failure or any setup/submission error
     */
    public static boolean fixHadoop() {
        try {
            System.setProperty("HADOOP_USER_NAME", "codingce");
            Configuration conf = new Configuration();
            conf.set("dfs.replication", "2");
            conf.set("dfs.client.socket-timeout", "300000");
            // Required so the client reaches datanodes via hostname (e.g. behind cloud NAT).
            conf.set("dfs.client.use.datanode.hostname", "true");
            FileSystem fs = FileSystem.get(new URI(OUTPUTPATH), conf, "codingce");
            // The job aborts if the output directory already exists, so remove it first.
            if (fs.exists(new Path(OUTPUTPATH))) {
                fs.delete(new Path(OUTPUTPATH), true);
            }
            Job job = Job.getInstance(conf);
            FileInputFormat.addInputPath(job, new Path(INPUTPATH));
            job.setInputFormatClass(TextInputFormat.class);
            ChainMapper.addMapper(job, FilterMapper1.class, LongWritable.class, Text.class, Text.class, IntWritable.class, conf);
            ChainReducer.setReducer(job, SumReducer.class, Text.class, IntWritable.class, Text.class, IntWritable.class, conf);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);
            job.setPartitionerClass(HashPartitioner.class);
            job.setNumReduceTasks(1);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            FileOutputFormat.setOutputPath(job, new Path(OUTPUTPATH));
            job.setOutputFormatClass(TextOutputFormat.class);
            // Submit the job and block until it finishes.
            // FIX: the original returned true unconditionally, hiding job failures.
            return job.waitForCompletion(true);
        } catch (Exception e) {
            // FIX: the original swallowed every exception silently.
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Filters ticket records down to airport traffic: for every record in which an
     * airport (name starting with {@code "C"}) takes part, emits
     * {@code (airport, ticketCount)} for each participating airport.
     *
     * <p>Malformed lines (fewer than 4 columns, non-numeric ticket count) are skipped
     * rather than failing the task.
     */
    public static class FilterMapper1 extends Mapper<LongWritable, Text, Text, IntWritable> {
        private final Text outKey = new Text();
        private final IntWritable outValue = new IntWritable();

        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context) throws IOException, InterruptedException {
            String line = value.toString();
            if (line.isEmpty()) {
                return;
            }
            // Record layout: time \t seller \t buyer \t tickets \t amount
            String[] arr = line.split("\t");
            if (arr.length < 4) {
                // FIX: guard against short rows instead of throwing ArrayIndexOutOfBoundsException.
                return;
            }
            int visit;
            try {
                visit = Integer.parseInt(arr[3].trim());
            } catch (NumberFormatException nfe) {
                // FIX: skip rows with a non-numeric ticket count instead of killing the task.
                return;
            }
            outValue.set(visit);
            // FIX: the original always emitted the seller (arr[1]) even when only the
            // buyer was an airport. Credit each side that is an airport, matching the
            // "busiest airports including buy and sell" requirement.
            if (arr[1].startsWith("C")) {
                outKey.set(arr[1]);
                context.write(outKey, outValue);
            }
            if (arr[2].startsWith("C")) {
                outKey.set(arr[2]);
                context.write(outKey, outValue);
            }
        }
    }

    /**
     * Sums the per-record ticket counts for each airport key, producing the total
     * ticket traffic per airport.
     */
    public static class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        private final IntWritable outValue = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Reducer<Text, IntWritable, Text, IntWritable>.Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            outValue.set(sum);
            context.write(key, outValue);
        }
    }

    /**
     * Uploads a local sample file to HDFS.
     *
     * <p>FIX: the original closed {@code fs} in a {@code finally} block without a null
     * check, so a failure in {@link FileSystem#get} caused a NullPointerException.
     * try-with-resources closes the filesystem safely in every case.
     *
     * @throws IOException never in practice (failures are reported on stdout), kept
     *                     for interface compatibility with existing callers
     */
    public static void testCopyFromLocalFile() throws IOException {
        // 1 Build the client configuration.
        Configuration configuration = new Configuration();
        configuration.set("dfs.replication", "2");
        configuration.set("dfs.client.socket-timeout", "300000");
        // Required so the client reaches datanodes via hostname.
        configuration.set("dfs.client.use.datanode.hostname", "true");
        // 2 Open the filesystem and upload; 3 the resource block closes it.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://hadoopali:8020"), configuration, "codingce")) {
            fs.copyFromLocalFile(new Path("/Users/mxz/order.txt"), new Path("/mapreducetest/test1.txt"));
        } catch (Exception e) {
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
            System.out.println("失败了\t" + e.getMessage() + "\t操作失败");
        }
    }

    /**
     * Downloads a file from HDFS to the local filesystem.
     *
     * <p>{@code copyToLocalFile(delSrc, src, dst, useRawLocalFileSystem)}:
     * {@code delSrc} — whether to delete the source file;
     * {@code src} — HDFS path to download;
     * {@code dst} — local destination path;
     * {@code useRawLocalFileSystem} — whether to skip the checksum file.
     *
     * @throws IOException          on HDFS access failure
     * @throws InterruptedException if the calling thread is interrupted
     * @throws URISyntaxException   never (the URI literal is valid), kept for
     *                              interface compatibility
     */
    public static void testCopyToLocalFile() throws IOException, InterruptedException, URISyntaxException {
        // 1 Open the filesystem; the resource block closes it.
        Configuration configuration = new Configuration();
        try (FileSystem fs = FileSystem.get(new URI("hdfs://hadoopali:8020"), configuration, "codingce")) {
            // 2 Perform the download.
            fs.copyToLocalFile(false, new Path("mapreducetest/order.txt"), new Path("/Users/mxz/mxz-code/gitee/cat-leader/cat-common/src/main/java/cn/com/coding/common/utils/hello2.txt"), true);
        }
    }


}