package Mysql;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.*;

/**
 * Skeleton {@link InputFormat} for reading rows from MySQL as MapReduce input.
 *
 * <p>Both entry points are still unimplemented placeholders; they are written
 * to fail safely (empty list / fast exception) rather than return {@code null}.
 *
 * @param <V> concrete value type produced by this format
 */
public class MysqlInputFormat<V extends MysqlInputValueWritable> extends InputFormat<LongWritable, V> {

    /**
     * Returns the input splits for the job; the ApplicationMaster uses the
     * number of splits to decide how many map tasks to launch.
     *
     * @param jobContext job configuration context
     * @return the computed splits — currently an empty list (not implemented)
     */
    @Override
    public List<InputSplit> getSplits(JobContext jobContext) throws IOException, InterruptedException {
        // Placeholder: split computation is not implemented yet. Return an
        // empty list instead of null so any caller iterating the result
        // does not hit a NullPointerException.
        return Collections.emptyList();
    }

    /**
     * Creates the {@link RecordReader} that parses the data of one split.
     *
     * @param inputSplit         the split this reader will consume
     * @param taskAttemptContext task attempt context
     * @throws UnsupportedOperationException always — not implemented yet
     */
    @Override
    public RecordReader<LongWritable, V> createRecordReader(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        // Placeholder: fail fast with a clear message instead of returning
        // null, which would otherwise surface as an opaque NPE inside the
        // map task long after this call.
        throw new UnsupportedOperationException(
                "MysqlInputFormat.createRecordReader is not implemented yet");
    }

    /**
     * Split describing one slice of the MySQL input.
     *
     * <p>NOTE(review): concrete {@code mapreduce} InputSplit implementations
     * normally must also implement {@code org.apache.hadoop.io.Writable} so the
     * framework can serialize them out to the tasks — confirm before wiring
     * this class into a real job.
     */
    public static class MysqlInputSplit extends InputSplit {

        /** Placeholder size in bytes of this split's data. */
        @Override
        public long getLength() throws IOException, InterruptedException {
            return 0;
        }

        /** No locality hints yet — an empty array lets the split run anywhere. */
        @Override
        public String[] getLocations() throws IOException, InterruptedException {
            return new String[0];
        }
    }
}
