package com.heibaiying;

import com.heibaiying.component.WordCountMapper;
import com.heibaiying.component.WordCountReducer;
import org.apache.commons.lang.SystemUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URI;

/**
 * Assembles the word-count job and submits it for execution.
 *
 * <p>Runs against the local filesystem ({@code file:///}) in local MapReduce
 * mode, so no cluster is required; input/output paths are resolved relative
 * to the current working directory.
 */
public class WordCountApp {

    // Hard-coded here for clarity; in real deployments these would come from
    // external configuration or command-line arguments.
    private static final String HDFS_URL = "file:///";
    private static final String HADOOP_USER_NAME = System.getProperty("user.name");

    /**
     * Entry point: configures, submits, and waits for the word-count job.
     *
     * @param args {@code args[0]} = input file path, {@code args[1]} = output
     *             directory path, both relative to the working directory.
     *             Defaults are supplied when fewer than two arguments are given.
     * @throws Exception on any configuration, I/O, or job-submission failure
     */
    public static void main(String[] args) throws Exception {
        // BUGFIX: the original unconditionally overwrote args with hard-coded
        // values, which made the subsequent length check dead code and ignored
        // any real command-line input. Fall back to defaults only when needed.
        if (args.length < 2) {
            args = new String[]{"/files/input.txt", "/files/output"};
        }

        // Windows-only preparation (hadoop.home.dir + native library).
        // BUGFIX: the original called System.load("C:\\hadoop\\bin\\hadoop.dll")
        // unconditionally, which fails with UnsatisfiedLinkError on non-Windows
        // hosts; loading is now guarded by the OS check inside this helper.
        initializeWindowsEnvironment();

        Configuration configuration = new Configuration();
        // Point the default filesystem at local disk and run MapReduce in
        // local (in-process) mode.
        configuration.set("fs.defaultFS", HDFS_URL);
        configuration.set("mapreduce.framework.name", "local");
        configuration.set("hadoop.tmp.dir", System.getProperty("java.io.tmpdir"));
        configuration.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
        configuration.setBoolean("mapreduce.map.speculative", false);
        // Allow a client on one OS to submit jobs to a cluster on another.
        configuration.set("mapreduce.app-submission.cross-platform", "true");

        // Create the job and register the driver class (used to locate the jar).
        Job job = Job.getInstance(configuration);
        job.setJarByClass(WordCountApp.class);

        // Wire up the Mapper and Reducer implementations.
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);

        // Mapper output key/value types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Reducer (final) output key/value types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        Path inputPath = new Path("file:///" + System.getProperty("user.dir") + args[0]);
        Path outputPath = new Path("file:///" + System.getProperty("user.dir") + args[1]);

        boolean result;
        // BUGFIX: try-with-resources guarantees the FileSystem is closed even
        // when job setup or execution throws (the original leaked it on error).
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URL), configuration, HADOOP_USER_NAME)) {
            // The output directory must not already exist or submission throws;
            // delete it so the program can be re-run.
            if (fileSystem.exists(outputPath)) {
                fileSystem.delete(outputPath, true);
            }

            // Set the job's input file and output directory.
            FileInputFormat.setInputPaths(job, inputPath);
            FileOutputFormat.setOutputPath(job, outputPath);

            // true => print job progress to the console while waiting.
            result = job.waitForCompletion(true);
        }

        // Exit code reflects job success so calling scripts can detect failure.
        System.exit(result ? 0 : -1);
    }

    /**
     * Windows-only environment preparation; a no-op on other operating systems.
     * Sets {@code hadoop.home.dir} (expected to contain {@code bin\hadoop.dll})
     * and attempts to load the native library.
     */
    private static void initializeWindowsEnvironment() {
        if (SystemUtils.IS_OS_WINDOWS) {
            // BUGFIX: the original set two conflicting values for
            // hadoop.home.dir ("C:\\hadoop_home" here, then "C:\\hadoop" in
            // main); keep the one matching the native-library location.
            System.setProperty("hadoop.home.dir", "C:\\hadoop");
            // Disable native-library checks that fail without a full native build.
            System.setProperty("hadoop.security.native.lib", "false");
            System.setProperty("hadoop.util.native.lib.available", "false");
            loadHadoopNativeLib();
        }
    }

    /**
     * Best-effort load of the Hadoop native library for Windows. Failure is
     * reported but not fatal: local-mode jobs can usually proceed without it.
     */
    private static void loadHadoopNativeLib() {
        try {
            System.load("C:\\hadoop\\bin\\hadoop.dll");
        } catch (UnsatisfiedLinkError | SecurityException e) {
            // NOTE(review): the original also reflected NativeIO.Windows#access0
            // and called setAccessible(true) on it, but never invoked or
            // replaced the method — a no-op, so that code was removed.
            System.err.println("Failed to load Hadoop native library: " + e.getMessage());
        }
    }
}
