package MRDemo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import workCount.HDFS_client;

import java.io.IOException;
import java.net.URISyntaxException;

/**
 * Driver for the MRDemo MapReduce job.
 *
 * <p>Uploads a local log file to HDFS, runs the map/reduce pair
 * ({@code mapDemo}/{@code reduceDemo}) over it, then prints the job output
 * back to the console. Exits with a non-zero status when the job fails.
 */
public class Main {
    /**
     * Entry point: stages input on HDFS, submits the job, and prints results.
     *
     * @param args unused
     * @throws IOException            on HDFS or job-submission I/O failure
     * @throws URISyntaxException     if the HDFS URL is malformed
     * @throws InterruptedException   if job monitoring is interrupted
     * @throws ClassNotFoundException if a job class cannot be resolved
     */
    public static void main(String[] args) throws IOException, URISyntaxException, InterruptedException, ClassNotFoundException {
        // Cluster endpoint and the local file to process.
        String hdfsUrl = "hdfs://192.168.35.131:9000";
        String localDir = "D:\\data\\HCIP\\files\\consume.log";

        // HDFS user the client authenticates as.
        String user = "root";
        // Directory on HDFS where raw input is staged.
        String srcDir = "/input";
        // Directory on HDFS where the job writes its output.
        String destDir = "/output";
        // Full HDFS path of the file to process.
        String fileName = srcDir + "/consume.log";

        HDFS_client client = new HDFS_client(hdfsUrl, user);

        client.createDir(srcDir);

        // Upload the file to be processed.
        client.uploadFile(localDir, srcDir);
        // Echo the uploaded file so the input is visible in the console.
        System.out.println("==========\n" + "当前需要处理的文件内容");
        client.printFile(fileName);
        // MapReduce refuses to run if the output directory already exists,
        // so remove any leftover from a previous run.
        client.delFile(destDir);

        // Configure and submit the MapReduce job.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsUrl);
        Job job = Job.getInstance(conf, "MRDemo");

        // Job input/output locations.
        FileInputFormat.addInputPath(job, new Path(fileName));
        FileOutputFormat.setOutputPath(job, new Path(destDir));

        // Wire up the driver, mapper, and reducer classes.
        job.setJarByClass(Main.class);
        job.setMapperClass(mapDemo.class);
        job.setReducerClass(reduceDemo.class);

        // Map output types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(bean.class);
        // Reduce (final) output types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(bean.class);

        // Run the job and block until it finishes; capture success/failure
        // instead of discarding it, so a failed job is not reported as done.
        boolean succeeded = job.waitForCompletion(true);

        if (succeeded) {
            // Show the job output in the console.
            System.out.println("==========" + "\n当前处理完成的文件内容\n" + "==========");
            client.printFile(destDir);
        }

        // Release the HDFS connection before exiting.
        client.close();

        // Propagate the job result to the caller via the exit status.
        System.exit(succeeded ? 0 : 1);
    }
}
