package com.liao.intimacy;

import com.liao.kv.base.BaseValue;
import com.liao.kv.value.CountDurationValue;
import com.liao.util.JDBCCacheBean;
import com.liao.util.JDBCUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * OutputFormat that sinks reducer output (contact-pair intimacy aggregates)
 * into the MySQL table {@code tb_intimacy} using batched JDBC inserts.
 *
 * <p>Keys are of the form {@code "<phone1>_<phone2>"}; values carry the call
 * count and total call duration for that pair.
 */
public class RankMySQLOutputFormat extends OutputFormat<Text, CountDurationValue> {

    /**
     * Creates a RecordWriter that buffers INSERTs client-side and flushes
     * them to MySQL in batches.
     *
     * @param taskAttemptContext the task attempt context (unused here)
     * @return a writer bound to the shared JDBC connection
     * @throws IOException if the JDBC connection cannot be obtained or configured
     */
    @Override
    public RecordWriter<Text, CountDurationValue> getRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        Connection conn;
        try {
            // Obtain the shared (singleton) JDBC connection and disable
            // auto-commit so rows can be committed in batches.
            conn = JDBCCacheBean.getInstance();
            conn.setAutoCommit(false);
        } catch (SQLException e) {
            throw new IOException("Failed to obtain/configure JDBC connection", e);
        }
        return new RankMysqlRecordWriter(conn);
    }

    @Override
    public void checkOutputSpecs(JobContext jobContext) throws IOException, InterruptedException {
        // Nothing to validate: output goes to MySQL, not to an HDFS directory.
    }

    @Override
    public OutputCommitter getOutputCommitter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
        // Reuse FileOutputCommitter so the MR framework has a working commit
        // protocol, even though the real output is written to MySQL.
        String name = taskAttemptContext.getConfiguration().get(FileOutputFormat.OUTDIR);
        Path output = name == null ? null : new Path(name);
        return new FileOutputCommitter(output, taskAttemptContext);
    }

    /**
     * Writer that buffers INSERT statements and executes them in batches of
     * {@link #BATCH_SIZE}, committing after each flush (auto-commit is off).
     */
    private static class RankMysqlRecordWriter extends RecordWriter<Text, CountDurationValue> {

        private static final String INSERT_SQL =
                "INSERT INTO `tb_intimacy`(`intimacy_rank`, `contact_id1`, `contact_id2`, `call_count`, `call_duration_count`) VALUES(?, ?, ?, ?, ?);";

        /** Number of buffered statements that triggers a batch flush. */
        private static final int BATCH_SIZE = 300;

        // Connection configured with auto-commit disabled by getRecordWriter().
        // Do NOT re-fetch it per write: a fresh connection from the cache
        // would not have auto-commit disabled, breaking the commit protocol.
        private final Connection conn;
        // Maps phone numbers to contact ids; one instance reused for all rows.
        private final Converter converter = new Converter();
        private PreparedStatement preparedStatement = null;
        private int count = 0; // statements buffered since the last flush

        public RankMysqlRecordWriter(Connection conn) {
            this.conn = conn;
        }

        /**
         * Buffers one row; flushes and commits once BATCH_SIZE rows accumulate.
         *
         * @param text  key "{@code <phone1>_<phone2>}"
         * @param value aggregated call count and duration for the pair
         * @throws IOException if preparing, buffering, or flushing the batch fails
         */
        @Override
        public void write(Text text, CountDurationValue value) throws IOException, InterruptedException {
            String[] phones = text.toString().split("_");
            int intimacyRank = 1;
            int contactId1 = converter.getPhoneId(phones[0]);
            int contactId2 = converter.getPhoneId(phones[1]);

            try {
                if (preparedStatement == null) {
                    preparedStatement = conn.prepareStatement(INSERT_SQL);
                }
                int i = 0; // JDBC parameter indices are 1-based
                preparedStatement.setInt(++i, intimacyRank);
                preparedStatement.setInt(++i, contactId1);
                preparedStatement.setInt(++i, contactId2);
                preparedStatement.setInt(++i, value.getCallSum());
                preparedStatement.setInt(++i, value.getCallDurationSum());
                preparedStatement.addBatch(); // buffer the statement client-side

                if (++count >= BATCH_SIZE) {
                    preparedStatement.executeBatch();
                    conn.commit(); // auto-commit is off, so commit explicitly
                    count = 0;
                }
            } catch (SQLException e) {
                // Propagate so the task attempt fails instead of silently
                // dropping rows (the original printStackTrace() lost data).
                throw new IOException("Failed to buffer/flush intimacy batch", e);
            }
        }

        /**
         * Flushes any remaining partial batch, then releases JDBC resources.
         *
         * @throws IOException if the final flush or commit fails
         */
        @Override
        public void close(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
            try {
                // Guard against NPE when no records were ever written, and
                // skip the flush entirely when there is nothing buffered.
                if (preparedStatement != null && count > 0) {
                    preparedStatement.executeBatch();
                    conn.commit();
                }
            } catch (SQLException e) {
                throw new IOException("Failed to flush final intimacy batch", e);
            } finally {
                JDBCUtil.close(conn, preparedStatement, null);
            }
        }
    }
}
