package us.luosl.hadoop.mr;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBOutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.File;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Iterator;

/**
 * Database access: a MapReduce job that reads (name, alias) rows from MySQL,
 * groups aliases by name, and writes the joined results back to MySQL.
 * Created by luosl on 2018/9/3.
 */
public class DBAccess {

    /**
     * 定义数据库读写
     */
    /**
     * Record type for one (name, alias) row.
     *
     * <p>Implements {@link DBWritable} so the framework can move the record
     * to/from JDBC, and {@link Writable} so it can cross the shuffle.
     */
    public static class TestDBWritable implements DBWritable, Writable{

        private String name;
        private String alias;

        /**
         * Binds this record to the output INSERT statement.
         * Parameter order (1 = name, 2 = alias) must match the field list
         * passed to {@code DBOutputFormat.setOutput} in {@code main}.
         */
        @Override
        public void write(PreparedStatement statement) throws SQLException {
            // Removed stray debug statement (System.out.println()) left in the hot path.
            statement.setString(1, name);
            statement.setString(2, alias);
        }

        /** Populates this record from one row of the input query; expects "name" and "alias" columns. */
        @Override
        public void readFields(ResultSet resultSet) throws SQLException {
            name = resultSet.getString("name");
            alias = resultSet.getString("alias");
        }

        public String getName() {
            return name;
        }

        /** Fluent setter; returns {@code this} for chaining. */
        public TestDBWritable setName(String name) {
            this.name = name;
            return this;
        }

        public String getAlias() {
            return alias;
        }

        /** Fluent setter; returns {@code this} for chaining. */
        public TestDBWritable setAlias(String alias) {
            this.alias = alias;
            return this;
        }

        /** Hadoop serialization. NOTE(review): writeUTF throws NPE on null fields — assumes DB columns are NOT NULL; confirm. */
        @Override
        public void write(DataOutput out) throws IOException {
            out.writeUTF(name);
            out.writeUTF(alias);
        }

        /** Hadoop deserialization; must read fields in the same order {@link #write(DataOutput)} wrote them. */
        @Override
        public void readFields(DataInput in) throws IOException {
            name = in.readUTF();
            alias = in.readUTF();
        }
    }

    /**
     * Mapper: turns each database record into a (name, alias) text pair
     * so the reducer can group aliases by name.
     */
    public static class DBMapper extends Mapper<Object, TestDBWritable, Text, Text>{
        // Output key/value are reused across records to avoid per-call allocation.
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        @Override
        protected void map(Object key, TestDBWritable value, Context context) throws IOException, InterruptedException {
            outKey.set(value.getName());
            outValue.set(value.getAlias());
            context.write(outKey, outValue);
        }
    }

    /**
     * Reducer: concatenates all aliases of a name into a single
     * separator-delimited string and emits it as one output row.
     */
    public static class DBReducer extends Reducer<Text, Text, TestDBWritable, NullWritable>{
        private String separator;
        // Reused output record to avoid per-group allocation.
        private TestDBWritable dbw = new TestDBWritable();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();
            // Delimiter placed between aliases; configurable, defaults to ";".
            separator = conf.get("DBAccess.separator", ";");
        }

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            String str = mkStr(values);
            dbw.setName(key.toString()).setAlias(str);
            context.write(dbw, NullWritable.get());
        }

        /**
         * Joins the values with {@code separator}.
         *
         * <p>Bug fix: the original appended a trailing separator and then called
         * {@code buff.subSequence(...)}, whose return value was discarded —
         * {@code subSequence} does not mutate the builder, so every result kept
         * the trailing separator (and the computed trim length was also off by
         * one). Appending the separator only between elements avoids any
         * trimming and handles the empty and single-element cases correctly.
         */
        private String mkStr(Iterable<Text> values){
            StringBuilder buff = new StringBuilder();
            for (Text value : values) {
                if (buff.length() > 0) {
                    buff.append(separator);
                }
                buff.append(value.toString());
            }
            return buff.toString();
        }
    }

    /**
     * Configures and runs the job: reads rows from MySQL table {@code test},
     * groups aliases by name, and writes results into table {@code testResult}.
     *
     * @param args unused
     */
    public static void main(String[] args) throws Exception {
        // NOTE(review): leftover from a file-output variant — DB output writes no
        // local "out" directory; presumably removable, kept for behavior parity.
        File out = new File("out");
        if(out.exists()){
            FileUtils.forceDelete(out);
        }

        Configuration conf = new Configuration();
        // Disable output compression.
        conf.set("mapreduce.output.fileoutputformat.compress", "false");
        // JDBC connection settings. Use the new-API (mapreduce.lib.db) DBConfiguration,
        // fully qualified, so the config keys match what the new-API DBInputFormat below
        // reads. The file-level imports (L8-L9) pull in the OLD mapred.lib.db classes,
        // which belong to the deprecated API and mix badly with the mapreduce Job.
        org.apache.hadoop.mapreduce.lib.db.DBConfiguration.configureDB(conf,
                "com.mysql.jdbc.Driver",
                "jdbc:mysql://127.0.0.1/test?useUnicode=true&characterEncoding=utf-8",
                "root",
                "123456"
        );

        Job job = Job.getInstance(conf, "db access");
        job.setJarByClass(DBAccess.class);
        job.setMapperClass(DBMapper.class);
        job.setReducerClass(DBReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(TestDBWritable.class);
        job.setOutputValueClass(NullWritable.class);

        // Input: rows of the "test" table via the new-API DBInputFormat;
        // the second query supplies the row count used to compute splits.
        job.setInputFormatClass(DBInputFormat.class);
        DBInputFormat.setInput(job, TestDBWritable.class,
                "select * from test",
                "select count(*) from test"
        );

        // Output: new-API DBOutputFormat (fully qualified — see note above) writing
        // (name, alias) into "testResult"; field order matches TestDBWritable.write.
        job.setOutputFormatClass(org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.class);
        org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.setOutput(job, "testResult", "name", "alias");

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
