package top.wintp.offlinedataanalysis.anlyser.mr.nu;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.FileOutputCommitter;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

import java.io.IOException;

import top.wintp.offlinedataanalysis.anlyser.dim.StatsUserDimension;
import top.wintp.offlinedataanalysis.anlyser.value.reduce.MapWritableValue;
import top.wintp.offlinedataanalysis.util.MyBatisUtils;

/**
 * description:
 * <p>
 * author:  upuptop
 * <p>
 * qq: 337081267
 * <p>
 * CSDN:   http://blog.csdn.net/pyfysf
 * <p>
 * cnblogs:   http://www.cnblogs.com/upuptop
 * <p>
 * blog:   http://wintp.top
 * <p>
 * email:  pyfysf@163.com
 * <p>
 * time: 2019/8/27
 * <p>
 */
/**
 * {@link OutputFormat} that writes reducer output pairs
 * ({@link StatsUserDimension}, {@link MapWritableValue}) to MySQL through a
 * MyBatis session rather than to the filesystem.
 */
public class MySqlOutputFormat extends OutputFormat<StatsUserDimension, MapWritableValue> {

    @Override
    public RecordWriter<StatsUserDimension, MapWritableValue> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
        // Open a MyBatis session with auto-commit disabled; commit/close is
        // presumably handled downstream — TODO confirm in MysqlRecordWriter/MySqlCollector.
        SqlSession session = MyBatisUtils.getSqlSessionFactory().openSession(false);
        // Wrap the session in a collector and hand it to the record writer.
        return new MysqlRecordWriter(new MySqlCollector(session));
    }

    @Override
    public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
        // Intentionally a no-op: output goes to MySQL, so there is no output
        // path or spec to validate here.
    }

    @Override
    public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
        // The old-API (mapred) FileOutputCommitter extends the new-API
        // OutputCommitter, so its no-arg form serves as an effectively
        // do-nothing committer for this non-file output.
        return new FileOutputCommitter();
    }
}
