package com.mango.ch11;

import com.mango.Tools.DateUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.*;

public class MarkvoJob extends Configured implements Tool {
    // Local-filesystem working directories for the three chained MR stages.
    // NOTE(review): Windows-specific absolute paths — presumably local-mode
    // testing; these should be made configurable before cluster use.
    private static String ROOTPATH = "d:/HadoopData";
    private static String MR1_INPUT = "d:/HadoopData/input";          // raw lines: customerid,transactionid,purchaseDate,amount
    private static String MR1_OUTPUT = "d:/HadoopData/step1_output";  // job1 output, consumed by job2
    private static String MR2_OUTPUT = "d:/HadoopData/step2_output";  // job2 output, consumed by job3
    private static String MR3_OUTPUT = "d:/HadoopData/step3_output";  // final transition counts, merged into model.txt

    /**
     * Entry point: runs the job chain via {@link ToolRunner} and propagates
     * the exit status to the OS so calling scripts can detect failure.
     */
    public static void main(String[] args) {
        try {
            int status = ToolRunner.run(new Configuration(), new MarkvoJob(), args);
            System.exit(status);
        } catch (Exception e) {
            // Previously the stack trace was printed but the JVM still exited
            // with status 0, making failed runs indistinguishable from success.
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Stage 1 mapper: parses one raw transaction line
     * ({@code customerid,transactionid,purchaseDate,amount}) into a
     * {@link CompositeKey} (customer + date, enabling the secondary sort
     * configured on job1) and an {@link Nvalue} (date + amount).
     */
    static class MR1_Mapper extends Mapper<LongWritable, Text, CompositeKey, Nvalue> {

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Input text format: customerid,transactionid,purchaseDate,amount
            String[] tokens = value.toString().split(",");
            if (tokens.length < 4) {
                // Skip malformed records instead of failing the whole task with
                // an ArrayIndexOutOfBoundsException; count them for visibility.
                context.getCounter("MR1", "MALFORMED_RECORDS").increment(1);
                return;
            }
            CompositeKey mkey = new CompositeKey();
            mkey.setName(tokens[0]);
            Long date = DateUtils.dateToLong(tokens[2]);
            mkey.setDate(date);
            Nvalue nv = new Nvalue();
            nv.setDate(date);
            nv.setAmount(Integer.valueOf(tokens[3]));
            context.write(mkey, nv);
        }
    }

    /**
     * Stage 1 reducer: concatenates one customer's (date, amount) records —
     * delivered in date order by the secondary sort — into a single
     * ';'-separated line keyed by the customer name.
     */
    static class MR1_Reducer extends Reducer<CompositeKey, Nvalue, Text, Text> {
        @Override
        protected void reduce(CompositeKey key, Iterable<Nvalue> values, Context context) throws IOException, InterruptedException {
            StringBuilder joined = new StringBuilder();
            String separator = "";
            for (Nvalue record : values) {
                // Prefix each element after the first with ';' so no trailing
                // delimiter needs to be trimmed afterwards.
                joined.append(separator).append(record.toString());
                separator = ";";
            }
            context.write(new Text(key.getName()), new Text(joined.toString()));
        }
    }

    /**
     * Stage 2 mapper: turns each customer's chronological purchase list into
     * two-letter Markov state codes, one per adjacent pair of purchases.
     * First letter = time gap (S &lt; 30, M &lt; 60, else L); second letter
     * compares amounts with a 10% tolerance band (L / E / G).
     */
    static class MR2_Mapper extends Mapper<LongWritable, Text, Text, Text> {

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] parts = value.toString().split("\t");
            String customerId = parts[0];
            String[] purchases = parts[1].trim().split(";");
            // A transition needs two consecutive purchases, so a customer with
            // fewer than two records contributes nothing.
            if (purchases.length < 2) {
                return;
            }
            for (int i = 0; i + 1 < purchases.length; i++) {
                String[] current = purchases[i].split(",");
                String[] next = purchases[i + 1].split(",");
                Long elapsed = DateUtils.getDatePoor(current[0], next[0]);
                double currentAmount = Double.valueOf(current[1]);
                double nextAmount = Double.valueOf(next[1]);

                // Time-gap letter: Small / Medium / Large.
                String timeState;
                if (elapsed < 30) {
                    timeState = "S";
                } else if (elapsed < 60) {
                    timeState = "M";
                } else {
                    timeState = "L";
                }

                // Amount letter: Less / roughly Equal / Greater.
                String amountState;
                if (currentAmount < 0.9 * nextAmount) {
                    amountState = "L";
                } else if (currentAmount < 1.1 * nextAmount) {
                    amountState = "E";
                } else {
                    amountState = "G";
                }

                context.write(new Text(customerId), new Text(timeState + amountState));
            }
        }
    }

    /**
     * Stage 2 reducer: joins all state codes for a customer into one
     * comma-separated sequence.
     *
     * NOTE(review): Hadoop does not guarantee the iteration order of reducer
     * values, yet the downstream Markov counting assumes chronological order —
     * verify this holds for the deployment (it relies on a single mapper's
     * in-order emission surviving the shuffle).
     */
    static class MR2_Reducer extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            StringBuilder sequence = new StringBuilder();
            String separator = "";
            for (Text state : values) {
                sequence.append(separator).append(state);
                separator = ",";
            }
            context.write(key, new Text(sequence.toString()));
        }
    }

    /**
     * Stage 3 mapper: emits one ("fromState->toState", 1) pair for every
     * adjacent pair of states in a customer's state sequence.
     */
    static class MR3_Mapper extends Mapper<LongWritable, Text, Text, IntWritable> {

        // Reused output value — the framework serializes it on each write().
        private static final IntWritable ONE = new IntWritable(1);

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] tokens = value.toString().split("\t");
            String[] statusList = tokens[1].split(",");
            for (int i = 0; i < statusList.length - 1; i++) {
                String transition = statusList[i] + "->" + statusList[i + 1];
                // Declare the exceptions (as the other mappers in this file do)
                // instead of catching and printing them: a failed write must
                // fail the task, not be silently dropped.
                context.write(new Text(transition), ONE);
            }
        }
    }

    /**
     * Stage 3 combiner: pre-aggregates transition counts on the map side to
     * reduce shuffle volume; the reducer performs the identical summation.
     */
    static class MR3_Combiner extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int partialSum = 0;
            for (IntWritable count : values) {
                partialSum += count.get();
            }
            context.write(key, new IntWritable(partialSum));
        }
    }

    /**
     * Stage 3 reducer: sums the (possibly combiner-aggregated) counts per
     * state transition, producing the final Markov transition-count table.
     */
    static class MR3_Reducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable count : values) {
                sum += count.get();
            }
            // Declare the exceptions (matching MR3_Combiner) instead of
            // catching and printing: a failed write must fail the task, not
            // silently omit a row from the model.
            context.write(key, new IntWritable(sum));
        }
    }

    /**
     * Configures the three chained jobs (parse → state sequences → transition
     * counts), wires their dependencies through {@link JobControl}, waits for
     * completion, and merges the final output into a single model file.
     *
     * @param strings command-line arguments (unused; paths are the constants above)
     * @return 0 when all jobs succeed, 1 when any job fails
     * @throws Exception on configuration or monitoring errors
     */
    @Override
    public int run(String[] strings) throws Exception {

        Configuration conf = getConf();

        // Job 1: parse raw transactions. The grouping/partitioner/sort
        // comparators implement a secondary sort so each customer's purchases
        // reach the reducer ordered by date.
        Job job1 = Job.getInstance(conf);
        job1.setJarByClass(this.getClass());
        job1.setGroupingComparatorClass(CompositeKeyGroupCompartot.class);
        job1.setPartitionerClass(MyPartitioner.class);
        job1.setSortComparatorClass(CompositeKeySortComparetor.class);
        job1.setMapperClass(MarkvoJob.MR1_Mapper.class);
        job1.setReducerClass(MarkvoJob.MR1_Reducer.class);
        job1.setMapOutputKeyClass(CompositeKey.class);
        job1.setMapOutputValueClass(Nvalue.class);
        job1.setOutputKeyClass(Text.class);
        job1.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job1, new Path(MR1_INPUT));
        FileOutputFormat.setOutputPath(job1, new Path(MR1_OUTPUT));

        // Job 2: convert each purchase sequence into a Markov state sequence.
        Job job2 = Job.getInstance(conf);
        job2.setJarByClass(this.getClass());
        job2.setMapperClass(MarkvoJob.MR2_Mapper.class);
        job2.setMapOutputKeyClass(Text.class);
        job2.setReducerClass(MarkvoJob.MR2_Reducer.class);
        job2.setMapOutputValueClass(Text.class);
        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job2, new Path(MR1_OUTPUT));
        FileOutputFormat.setOutputPath(job2, new Path(MR2_OUTPUT));

        // Job 3: count state transitions; the combiner pre-aggregates map-side.
        Job job3 = Job.getInstance(conf);
        job3.setJarByClass(this.getClass());
        job3.setMapperClass(MarkvoJob.MR3_Mapper.class);
        job3.setCombinerClass(MarkvoJob.MR3_Combiner.class);
        job3.setReducerClass(MarkvoJob.MR3_Reducer.class);
        job3.setMapOutputKeyClass(Text.class);
        job3.setMapOutputValueClass(IntWritable.class);
        job3.setOutputKeyClass(Text.class);
        job3.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job3, new Path(MR2_OUTPUT));
        FileOutputFormat.setOutputPath(job3, new Path(MR3_OUTPUT));

        // Chain the jobs: job1 -> job2 -> job3.
        ControlledJob cj1 = new ControlledJob(conf);
        cj1.setJob(job1);
        ControlledJob cj2 = new ControlledJob(conf);
        cj2.setJob(job2);
        ControlledJob cj3 = new ControlledJob(conf);
        cj3.setJob(job3);

        cj2.addDependingJob(cj1);
        cj3.addDependingJob(cj2);

        JobControl jc = new JobControl(this.getClass().getSimpleName());
        jc.addJob(cj1);
        jc.addJob(cj2);
        jc.addJob(cj3);
        new Thread(jc).start();
        try {
            while (!jc.allFinished()) {
                for (ControlledJob cj : jc.getRunningJobList()) {
                    // Blocks while a job runs and prints its progress.
                    cj.getJob().monitorAndPrintJob();
                }
                // Avoid a hot busy-wait while no job is in the RUNNING state
                // yet (the original loop spun at 100% CPU between jobs).
                Thread.sleep(500);
            }
        } finally {
            // Stop the JobControl scheduler thread; without this it keeps
            // spinning after all jobs finish.
            jc.stop();
        }

        if (!jc.getFailedJobList().isEmpty()) {
            // The original always returned 0, hiding job failures from main().
            System.err.println("Failed jobs: " + jc.getFailedJobList());
            return 1;
        }
        System.out.println("所有Job 均已完成");
        saveToTxtFile(new File(MR3_OUTPUT), ROOTPATH + "/model.txt");
        return 0;
    }

    /**
     * Merges every regular data file in {@code directory} (a job output
     * directory containing part-r-* files) into one text file.
     *
     * @param directory   directory whose part files are concatenated
     * @param tartgetPath destination path of the merged model file
     */
    public void saveToTxtFile(File directory, String tartgetPath) {
        File[] parts = directory.listFiles();
        if (parts == null) {
            // listFiles() returns null when the path is not a directory or an
            // I/O error occurs — the original dereferenced it unchecked.
            System.err.println("Cannot list output directory: " + directory);
            return;
        }
        // try-with-resources closes the writer and each reader even when an
        // exception interrupts the copy loop; the original only closed the
        // reader on the success path, leaking it on failure.
        try (BufferedWriter bw = new BufferedWriter(new FileWriter(tartgetPath))) {
            for (File f : parts) {
                if (!f.isFile()) continue;
                String fileName = f.getName();
                // Skip Hadoop bookkeeping files: ".part-r-00000.crc" checksums
                // and the empty "_SUCCESS" marker.
                if (fileName.startsWith(".") || fileName.startsWith("_")) continue;
                try (BufferedReader br = new BufferedReader(new FileReader(f))) {
                    String line;
                    while ((line = br.readLine()) != null) {
                        bw.write(line);
                        bw.newLine();
                    }
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
