package cn.com.coding.common.utils;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.chain.ChainMapper;
import org.apache.hadoop.mapreduce.lib.chain.ChainReducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;
import org.springframework.stereotype.Component;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

/**
 * @author inke219223m
 */

@Component
public class CatOrderMapReduce {

    /** HDFS path of the raw input file (tab-separated) the MapReduce job reads. */
    private static final String INPUTPATH = "hdfs://8.130.17.56:8020/opt/module/user_card_product/part-m-00000";
    /** HDFS output directory of the job; deleted and recreated on every run. */
    private static final String OUTPUTPATH = "hdfs://8.130.17.56:8020/opt/module/user_card_product/user_card_product";
    /** Local file the job result is downloaded to and later parsed by {@link #txtToDb()}. */
    private static final String ORDERDETAIL = "/Users/inke219223m/mxz-code/gitee/cat-leader/cat-common/src/main/java/cn/com/coding/common/utils/order.txt";

    /**
     * Ad-hoc driver: by default only downloads the previously computed job output
     * to the local machine. Uncomment the other steps to run the full pipeline
     * (run job -> download result -> parse result).
     */
    public static void main(String[] args) throws IOException, URISyntaxException, InterruptedException {
//        System.out.println("===================" + fixHadoop() + "===================");
        testCopyToLocalFile();
//        txtToDb();
    }

    /**
     * Configures and runs the aggregation job: {@link FilterMapper1} emits
     * (product-id, visit-count) pairs, {@link SumReducer} sums the counts per key.
     * Any pre-existing output directory is removed first, as Hadoop refuses to
     * overwrite it otherwise.
     *
     * @return {@code true} only if the job actually completed successfully
     */
    public static boolean fixHadoop() {
        try {
            System.setProperty("HADOOP_USER_NAME", "codingce");
            Configuration conf = new Configuration();
            conf.set("dfs.replication", "2");
            conf.set("dfs.client.socket-timeout", "300000");
            // Required so the HDFS client connects to datanodes via hostname
            // (needed when the cluster is behind NAT / a cloud public IP).
            conf.set("dfs.client.use.datanode.hostname", "true");
            FileSystem fs = FileSystem.get(new URI(OUTPUTPATH), conf, "codingce");
            if (fs.exists(new Path(OUTPUTPATH))) {
                fs.delete(new Path(OUTPUTPATH), true);
            }
            Job job = Job.getInstance(conf);
            FileInputFormat.addInputPath(job, new Path(INPUTPATH));
            job.setInputFormatClass(TextInputFormat.class);
            // Chain: single mapper feeding a single reducer.
            ChainMapper.addMapper(job, CatOrderMapReduce.FilterMapper1.class, LongWritable.class, Text.class, Text.class, IntWritable.class, conf);
            ChainReducer.setReducer(job, CatOrderMapReduce.SumReducer.class, Text.class, IntWritable.class, Text.class, IntWritable.class, conf);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);
            job.setPartitionerClass(HashPartitioner.class);
            // Single reducer so all output lands in one part-r-00000 file,
            // which testCopyToLocalFile() downloads by name.
            job.setNumReduceTasks(1);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            FileOutputFormat.setOutputPath(job, new Path(OUTPUTPATH));
            job.setOutputFormatClass(TextOutputFormat.class);
            // Submit the job and wait for completion.
            // BUG FIX: the old code ignored the result and always returned true,
            // reporting failed jobs as successful.
            return job.waitForCompletion(true);
        } catch (Exception e) {
            // BUG FIX: the exception was silently swallowed; at least surface it
            // so failures are diagnosable. (A proper logger would be preferable.)
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Mapper: parses tab-separated lines of the form
     * {@code id \t src \t product \t visit \t price} (e.g. "1  O700  O4133  1  2200")
     * and emits (product, visit).
     * Malformed lines are skipped instead of failing the whole task.
     */
    public static class FilterMapper1 extends Mapper<LongWritable, Text, Text, IntWritable> {
        private Text outKey = new Text();
        private IntWritable outValue = new IntWritable();

        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context) throws IOException, InterruptedException {
            String line = value.toString();
            if (line.isEmpty()) {
                return;
            }
            String[] arr = line.split("\t");
            // BUG FIX: guard against short/garbled records — previously a single
            // malformed line killed the task with ArrayIndexOutOfBoundsException.
            if (arr.length < 4) {
                return;
            }
            final int visit;
            try {
                visit = Integer.parseInt(arr[3]);
            } catch (NumberFormatException nfe) {
                // Non-numeric visit count: skip the record rather than fail the job.
                return;
            }
            outKey.set(arr[2]);
            outValue.set(visit);
            context.write(outKey, outValue);
        }
    }

    /**
     * Reducer: sums the visit counts per product key.
     */
    public static class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        private IntWritable outValue = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Reducer<Text, IntWritable, Text, IntWritable>.Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            outValue.set(sum);
            context.write(key, outValue);
        }
    }

    /**
     * Downloads the job's single output file (part-r-00000) from HDFS to
     * {@link #ORDERDETAIL} on the local file system.
     */
    public static void testCopyToLocalFile() throws IOException, InterruptedException, URISyntaxException {
        System.setProperty("HADOOP_USER_NAME", "codingce");
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "2");
        conf.set("dfs.client.socket-timeout", "300000");
        // Required so the HDFS client connects to datanodes via hostname.
        conf.set("dfs.client.use.datanode.hostname", "true");

        // BUG FIX: try-with-resources — the old code leaked the FileSystem
        // handle if copyToLocalFile threw before fs.close() was reached.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://8.130.17.56:8020"), conf, "codingce")) {
            // copyToLocalFile(delSrc, src, dst, useRawLocalFileSystem):
            //   delSrc=false               -> keep the HDFS copy
            //   useRawLocalFileSystem=true -> skip writing the local .crc checksum file
            fs.copyToLocalFile(false, new Path("/opt/module/user_card_product/user_card_product/part-r-00000"), new Path(ORDERDETAIL), true);
        }
    }

    /**
     * Reads the downloaded result file line by line and prints the first
     * tab-separated column of each line (placeholder for a future DB import).
     */
    public static void txtToDb() {
        // BUG FIX: the old manual close NPE'd in the finally block whenever the
        // file could not be opened (buffReader was still null). try-with-resources
        // closes the whole reader chain safely; explicit UTF-8 avoids depending
        // on the platform default charset.
        try (BufferedReader buffReader = new BufferedReader(
                new InputStreamReader(new FileInputStream(ORDERDETAIL), StandardCharsets.UTF_8))) {
            String strTmp;
            while ((strTmp = buffReader.readLine()) != null) {
                System.out.println(strTmp.split("\t")[0]);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

}
