package com.hh.xx.hcatalogmr;

import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hive.hcatalog.data.DefaultHCatRecord;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hive.hcatalog.mapreduce.InputJobInfo;
import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;

/**
 * export LIB_JARS=/myjarpath/hive-exec-3.1.2.jar,/myjarpath/hive-hcatalog-core-3.1.2.jar,/myjarpath/hive-metastore-3.1.2.jar,/myjarpath/jdo-api-3.0.1.jar,/myjarpath/libfb303-0.9.3.jar,/myjarpath/libthrift-0.9.3.jar,/myjarpath/datanucleus-api-jdo-4.2.4.jar,/myjarpath/datanucleus-core-4.1.17.jar,/myjarpath/datanucleus-rdbms-4.1.19.jar
 * export HADOOP_CLASSPATH=xxxxxx:/myjarpath/*
 * hadoop jar hivetutorial-1.0-SNAPSHOT.jar com.hh.xx.hcatalogmr.MyJob -libjars $LIB_JARS
 *
 * @author tzp
 * @since 2020/5/20
 */
public class MyJob extends Configured implements Tool {
    @VisibleForTesting
    static String TEMP_OUTPUT_PATH = "/user/mz_supertool/test/splitjobws/";

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // ToolRunner parses generic options (-libjars, -D...) and passes the rest to run().
        System.exit(ToolRunner.run(conf, new MyJob(), args));
    }

    /**
     * Configures and submits the MapReduce job: reads rows from the HCatalog
     * input table, groups by the {@code id} column and sums the {@code xx}
     * column, writing (id, sum) records to the output table.
     *
     * @param strings residual command-line args after generic-option parsing;
     *                optionally {@code [dbName [inputTable [outputTable]]]},
     *                defaulting to {@code default}, {@code test2}, {@code test_out2}
     * @return 0 on job success, 1 on failure
     */
    @Override
    public int run(String[] strings) throws Exception {
        Configuration conf = getConf();
        // Names may be overridden on the command line; defaults preserve the
        // original hard-coded behavior.
        String dbName = strings.length > 0 ? strings[0] : "default";
        String inputTableName = strings.length > 1 ? strings[1] : "test2";
        String outputTableName = strings.length > 2 ? strings[2] : "test_out2";

        Job job = Job.getInstance(conf, "My HCat Job");
        job.setJarByClass(this.getClass());

        // Read the whole table (partition filter == null) through HCatalog.
        HCatInputFormat.setInput(job, dbName, inputTableName, null);
        job.setInputFormatClass(HCatInputFormat.class);

        job.setMapperClass(MyMap.class);
        job.setReducerClass(MyReduce.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(WritableComparable.class);
        job.setOutputValueClass(DefaultHCatRecord.class);

        HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName, outputTableName, null));

        // Propagate the destination table's schema so HCatOutputFormat can
        // serialize the records produced by the reducer.
        HCatSchema outputSchema = HCatOutputFormat.getTableSchema(job.getConfiguration());
        System.err.println("INFO: output schema explicitly set for writing: " + outputSchema);
        HCatOutputFormat.setSchema(job, outputSchema);
        job.setOutputFormatClass(HCatOutputFormat.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Mapper: for each input row, emits (id, xx) so the reducer can
     * aggregate {@code xx} per {@code id}.
     */
    public static class MyMap extends Mapper<WritableComparable, HCatRecord, Text, IntWritable> {
        private HCatSchema schema;

        @Override
        protected void setup(Context context)
                throws IOException, InterruptedException {
            // The input schema is required to resolve columns by name in map().
            schema = HCatInputFormat.getTableSchema(context.getConfiguration());
            if (schema == null) {
                throw new IllegalStateException("HCatalog input table schema is null");
            }
        }

        @Override
        protected void map(WritableComparable key, HCatRecord value, Context context)
                throws IOException, InterruptedException {
            String id = (String) value.get("id", schema);
            Integer xx = (Integer) value.get("xx", schema);
            // Fail fast with a clear message instead of an NPE deep inside
            // Text/IntWritable construction.
            if (id == null || xx == null) {
                throw new IOException("null column in input record: id=" + id + ", xx=" + xx);
            }
            context.write(new Text(id), new IntWritable(xx));
        }
    }


    /**
     * Reducer: sums the mapped {@code xx} values per {@code id} and writes a
     * two-column (id, sum) HCatalog record.
     *
     * <p>Bug fix: the original loop discarded every value and counted rows
     * ({@code sum++; iter.next();}) even though the accumulator was named
     * {@code sum} and the mapper went to the trouble of extracting {@code xx}.
     */
    public static class MyReduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            HCatRecord record = new DefaultHCatRecord(2);
            record.set(0, key.toString());
            record.set(1, sum);

            // HCatOutputFormat ignores the key; the record carries all columns.
            context.write(null, record);
        }
    }
}
