package my.io.hadoop.jobs.db;

import cn.hutool.core.util.StrUtil;
import lombok.RequiredArgsConstructor;
import my.io.hadoop.Main;
import my.io.hadoop.entity.JobResult;
import my.io.hadoop.entity.JobInput;
import my.io.hadoop.jobs.JobComponent;
import org.hibernate.cfg.PropertyContainer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.hibernate.annotations.common.reflection.XClass;
import org.hibernate.annotations.common.reflection.XProperty;
import org.hibernate.annotations.common.reflection.java.JavaReflectionManager;
import org.hibernate.cfg.AccessType;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;

@Component
@RequiredArgsConstructor
public class DBJob implements JobComponent {

    /** Job name used when the caller passes a blank name. */
    private static final String DEFAULT_NAME = "default";

    /** Hadoop configuration, injected by Spring via the Lombok-generated constructor. */
    private final Configuration configuration;

    /** Entity whose fields define the input table columns. */
    public static final Class<?> INPUT_CLASS = JobInput.class;
    /** Entity whose fields define the output table columns. */
    public static final Class<?> RESULT_CLASS = JobResult.class;

    /** Hibernate field metadata for {@link #RESULT_CLASS}. */
    public static final PropertyContainer JOB_OUT_PROPS;

    /** Hibernate field metadata for {@link #INPUT_CLASS}. */
    public static final PropertyContainer JOB_IN_PROPS;

    /** Column names of the input table: every field of {@link #INPUT_CLASS} except {@code id}. */
    public static final String[] INPUT_COLS;

    /** Column names of the output table: every field of {@link #RESULT_CLASS} except {@code id}. */
    public static final String[] OUTPUT_COLS;

    static {
        JavaReflectionManager reflectionManager = new JavaReflectionManager();

        XClass outClass = reflectionManager.toXClass(RESULT_CLASS);
        JOB_OUT_PROPS = new PropertyContainer(outClass, outClass, AccessType.FIELD);
        OUTPUT_COLS = columnNames(JOB_OUT_PROPS);

        XClass inClass = reflectionManager.toXClass(INPUT_CLASS);
        JOB_IN_PROPS = new PropertyContainer(inClass, inClass, AccessType.FIELD);
        INPUT_COLS = columnNames(JOB_IN_PROPS);
    }

    /**
     * Collects every property name from {@code props} except the synthetic {@code id} column.
     * Replaces the two duplicated loops in the original static initializer.
     */
    private static String[] columnNames(PropertyContainer props) {
        ArrayList<String> cols = new ArrayList<>();
        for (XProperty property : props.propertyIterator()) {
            if (!"id".equals(property.getName())) {
                cols.add(property.getName());
            }
        }
        return cols.toArray(new String[0]);
    }

    /**
     * Configures and runs the MapReduce job, blocking until it completes.
     *
     * @param jobName job display name; falls back to {@link #DEFAULT_NAME} when blank
     * @param inPath  unused here (the job reads from the database); kept for interface compatibility
     * @throws IOException            on Hadoop I/O failure
     * @throws InterruptedException   if the wait for completion is interrupted
     * @throws ClassNotFoundException if a job class cannot be resolved at submission time
     */
    public void runJob(String jobName, String inPath) throws IOException, InterruptedException, ClassNotFoundException {
        jobName = StrUtil.blankToDefault(jobName, DEFAULT_NAME);

        Job job = Job.getInstance(configuration, jobName);
        job.setJarByClass(Main.class);

        // Map phase: (offset, row) -> (year, row)
        job.setMapperClass(JobMap.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(DBJobRow.class);

        // Reduce phase: (year, rows) -> (aggregated row, NullWritable)
        job.setReducerClass(JobReducer.class);
        // FIX: the final output types must be set with setOutputKeyClass/setOutputValueClass.
        // The original called setMapOutput* a second time, silently overwriting the mapper's
        // Text key class with DBJobRow and breaking the shuffle.
        job.setOutputKeyClass(DBJobRow.class);
        job.setOutputValueClass(NullWritable.class);

        // Input/output bindings.
        // NOTE(review): the hard-coded queries below look like debug leftovers; the
        // table/column-based variant is kept for reference — confirm which one is intended.
//        DBInputFormat.setInput(job, DBJobRow.class, StrUtil.toUnderlineCase(INPUT_CLASS.getSimpleName()), null, null, DBJob.INPUT_COLS);
        DBInputFormat.setInput(job, DBJobRow.class, "select * from a", "select count(1) from a");
        DBOutputFormat.setOutput(job, StrUtil.toUnderlineCase(RESULT_CLASS.getSimpleName()), OUTPUT_COLS);

        // Run the job and block until it finishes.
        job.waitForCompletion(true);
    }

    /**
     * Mapper: keys each database row by the 4-character year prefix of its {@code date} column.
     */
    public static class JobMap extends Mapper<LongWritable, DBJobRow, Text, DBJobRow> {
        // Reused across map() calls to avoid per-record allocation (standard Hadoop idiom).
        private final Text yearKey = new Text();

        @Override
        protected void map(LongWritable key, DBJobRow value, Mapper<LongWritable, DBJobRow, Text, DBJobRow>.Context context) throws IOException, InterruptedException {
            // assumes "date" is non-null and at least 4 chars (e.g. "2023-01-15") — TODO confirm
            String year = value.getStr("date").substring(0, 4);
            yearKey.set(year);
            context.write(yearKey, value);
        }
    }

    /**
     * Reducer: sums the five numeric columns of all rows sharing a year and emits one
     * aggregate row whose {@code date} column holds the year.
     */
    public static class JobReducer extends Reducer<Text, DBJobRow, DBJobRow, NullWritable> {
        @Override
        protected void reduce(Text key, Iterable<DBJobRow> values, Reducer<Text, DBJobRow, DBJobRow, NullWritable>.Context context) throws IOException, InterruptedException {
            BigDecimal kt, ws, qt, cf, wsj;
            kt = ws = qt = cf = wsj = BigDecimal.ZERO;

            for (DBJobRow value : values) {
                // assumes getBigDecimal never returns null for these columns — TODO confirm
                kt = kt.add(value.getBigDecimal("kt"));
                ws = ws.add(value.getBigDecimal("ws"));
                qt = qt.add(value.getBigDecimal("qt"));
                cf = cf.add(value.getBigDecimal("cf"));
                wsj = wsj.add(value.getBigDecimal("wsj"));
            }

            DBJobRow result = new DBJobRow();
            result.set("date", key.toString());
            result.set("kt", kt);
            result.set("ws", ws);
            result.set("qt", qt);
            result.set("cf", cf);
            result.set("wsj", wsj);
            // FIX: emit the NullWritable singleton instead of a raw null; the declared
            // value type is NullWritable and null relies on the output format never
            // dereferencing the value.
            context.write(result, NullWritable.get());
        }
    }

}
