package analysis;

import analysis.bean.Unit;
import analysis.bean.key.StatsUnit;
import analysis.bean.value.MapWritableValue;
import common.KPIType;
import org.apache.hadoop.mapreduce.*;

import java.io.IOException;
import java.sql.PreparedStatement;
import java.util.HashMap;
import java.util.Map;

/**
 * Skeleton {@link OutputFormat} intended to persist job output — {@link StatsUnit} keys
 * with {@link MapWritableValue} values — into MySQL.
 *
 * <p>Every method here is currently an unimplemented stub; wiring this format into a job
 * as-is will fail at runtime (the framework will NPE on the {@code null} writer).
 */
public class MySQLOutputFormat extends OutputFormat<StatsUnit, MapWritableValue> {

    /**
     * Returns the writer that emits each key/value pair to MySQL.
     *
     * @param taskAttemptContext context of the task attempt requesting the writer
     * @return currently always {@code null} — TODO: return a MySQL-backed writer
     */
    @Override
    public RecordWriter<StatsUnit, MapWritableValue> getRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        // TODO: construct and return the real writer; null will NPE inside the framework.
        return null;
    }

    /**
     * Validates the output specification before the job runs.
     *
     * <p>Intentionally empty for now: no table/connection validation is performed.
     */
    @Override
    public void checkOutputSpecs(JobContext jobContext) throws IOException, InterruptedException {

    }

    /**
     * Returns the committer responsible for finalizing task output.
     *
     * @return currently always {@code null} — TODO: supply a committer (e.g. a no-op
     *         committer, since MySQL writes need no HDFS commit step)
     */
    @Override
    public OutputCommitter getOutputCommitter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        return null;
    }

    /**
     * Stub writer for the MySQL output path.
     *
     * <p>NOTE(review): the name says "Writer" (with a "redcord" typo) but the class
     * extends {@link RecordReader}, so it implements the <em>reader</em> contract —
     * this looks like the wrong superclass; it likely should extend
     * {@code RecordWriter<StatsUnit, MapWritableValue>} and implement
     * {@code write}/{@code close}. Left unchanged here because switching the
     * superclass changes the class's public method set — confirm before fixing.
     */
    public static class MySQLredcordWriter extends RecordReader<StatsUnit,MapWritableValue>{

        // Presumably a per-KPI cache of prepared INSERT/UPDATE statements for batching —
        // TODO confirm intended use; also consider making this private final rather than
        // a public mutable field (left public in case external code already touches it).
        public Map<KPIType, PreparedStatement> buffer = new HashMap<KPIType, PreparedStatement>();

        /** No-op: no connection or statement setup is performed yet. */
        @Override
        public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {

        }

        /** Stub: always reports that no further key/value pair is available. */
        @Override
        public boolean nextKeyValue() throws IOException, InterruptedException {
            return false;
        }

        /** Stub: no current key; always {@code null}. */
        @Override
        public StatsUnit getCurrentKey() throws IOException, InterruptedException {
            return null;
        }

        /** Stub: no current value; always {@code null}. */
        @Override
        public MapWritableValue getCurrentValue() throws IOException, InterruptedException {
            return null;
        }

        /** Stub: always reports zero progress. */
        @Override
        public float getProgress() throws IOException, InterruptedException {
            return 0;
        }

        /** No-op: nothing to release yet (no connection/statements are ever opened). */
        @Override
        public void close() throws IOException {

        }
    }
}
