package com.example.stream;

import com.example.db.ConnectionPool;
import com.example.entity.User;
import org.apache.spark.sql.ForeachWriter;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * Spark Structured Streaming sink that batch-inserts {@link User} rows into a
 * MySQL {@code user} table using a JDBC connection borrowed from
 * {@link ConnectionPool}.
 *
 * <p>NOTE(review): {@code connection.commit()} assumes the pool hands out
 * connections with auto-commit disabled — confirm against
 * {@code ConnectionPool.getConnection()}.
 *
 * <p>The type parameter {@code T} is unused (the element type is fixed to
 * {@code User} by the superclass) but is kept for source compatibility with
 * existing callers.
 */
public class MysqlForeachWriter<T> extends ForeachWriter<User> {

    /** Number of rows buffered before the batch is flushed and committed. */
    private static final int BATCH_SIZE = 20;

    private static final String INSERT_SQL =
            "INSERT INTO user(id, name, age, email, timestamp) VALUES(?, ?, ?, ?, ?)";

    private Connection connection = null;
    private PreparedStatement pst = null;
    private int batchCount = 0;

    /**
     * Acquires a pooled connection and prepares the insert statement.
     *
     * @param partitionId id of the partition this writer handles
     * @param version     epoch/batch version supplied by Spark
     * @return {@code true} when both resources were obtained; {@code false}
     *     tells Spark to skip this partition
     */
    @Override
    public boolean open(long partitionId, long version) {
        try {
            connection = ConnectionPool.getConnection();
            pst = connection.prepareStatement(INSERT_SQL);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return connection != null && pst != null;
    }

    /**
     * Buffers one row into the JDBC batch; flushes and commits every
     * {@value #BATCH_SIZE} rows.
     *
     * @param tUser the row to insert
     */
    @Override
    public void process(User tUser) {
        try {
            pst.setLong(1, tUser.getId());
            pst.setString(2, tUser.getName());
            pst.setInt(3, tUser.getAge());
            pst.setString(4, tUser.getEmail());
            pst.setTimestamp(5, tUser.getTimestamp());
            pst.addBatch();
            batchCount++;
            if (batchCount >= BATCH_SIZE) {
                flush();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Flushes any remaining buffered rows, closes the statement, and returns
     * the connection to the pool.
     *
     * <p>Null-safe: {@code open} may have failed before either resource was
     * assigned, and the original code would NPE here in that case. The
     * statement is now closed explicitly — previously it was leaked.
     *
     * @param errorOrNull the error that stopped the query, or {@code null}
     */
    @Override
    public void close(Throwable errorOrNull) {
        try {
            if (connection != null && pst != null) {
                flush();
                System.out.println("提交数据，释放资源");
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            if (pst != null) {
                try {
                    pst.close();
                } catch (SQLException e) {
                    e.printStackTrace();
                }
                pst = null;
            }
            if (connection != null) {
                ConnectionPool.returnConnection(connection);
                connection = null;
            }
        }
    }

    /** Executes and commits the pending batch, then resets the buffer state. */
    private void flush() throws SQLException {
        pst.executeBatch();
        connection.commit();
        pst.clearBatch();
        batchCount = 0;
    }
}
