package com.sink;


import com.Connection.JDBCConnectionTools;
import com.Pojo.IISLog;
import com.alibaba.fastjson.JSONObject;
import lombok.AllArgsConstructor;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;


// Classes handed to Flink must be serializable so they can be shipped to the task managers.
@AllArgsConstructor
public class IISSink implements Serializable {

    // Selects which SQL dialect/driver JDBCConnectionTools builds the connection options for.
    private String sqltype;
    private static final Logger logger = LoggerFactory.getLogger(IISSink.class);


    public IISSink() {
    }

    /**
     * Builds a JDBC sink that parses each incoming JSON record into an {@link IISLog}
     * and inserts it into the {@code IISLOG} table with batched, parameterized inserts.
     * <p>
     * NOTE: upstream records must be produced by the
     * FilebeatJSONKeyValueNginxDeserializationSchema deserializer (plain JSON strings
     * whose keys match the {@code IISLog} field names) for this sink to work.
     *
     * @param user         database user name
     * @param password     database password
     * @param DBip         database host address
     * @param databaseName target database name
     * @return a Flink {@link SinkFunction} that writes IIS log rows via JDBC
     * @throws Exception if the JDBC connection options cannot be built
     */
    public SinkFunction<String> GetIISSinkString(String user, String password, String DBip, String databaseName
    ) throws Exception {

        return JdbcSink.sink(
                "insert  into IISLOG (datetime,s_ip,cs_method,cs_uri_stem,cs_uri_query,s_port,cs_username,c_ip," +
                        "User_Agent,Referer,status,time_taken) " +
                        "values" +
                        " (?,?,?,?,?,?,?,?,?,?,?,?)",
                (JdbcStatementBuilder<String>) (preparedStatement, s) -> {
                    IISLog log = JSONObject.parseObject(s, IISLog.class);
                    // DEBUG, not INFO: this runs once per record and would flood production
                    // logs; the parameterized form also defers toString() until enabled.
                    logger.debug("{}", log);
                    // PreparedStatement only accepts java.sql Date/Timestamp, so the datetime
                    // value is converted to a Timestamp before being written to the DB.
                    // NOTE(review): if getDatetime() returns a String, Timestamp.valueOf
                    // requires the "yyyy-[m]m-[d]d hh:mm:ss" format — confirm upstream format.
                    preparedStatement.setTimestamp(1, Timestamp.valueOf(log.getDatetime()));
                    preparedStatement.setString(2, log.getS_ip());
                    preparedStatement.setString(3, log.getCs_method());
                    preparedStatement.setString(4, log.getCs_uri_stem());
                    preparedStatement.setString(5, log.getCs_uri_query());
                    preparedStatement.setString(6, log.getS_port());
                    preparedStatement.setString(7, log.getCs_username());
                    preparedStatement.setString(8, log.getC_ip());
                    preparedStatement.setString(9, log.getUser_Agent());
                    preparedStatement.setString(10, log.getReferer());
                    preparedStatement.setString(11, log.getStatus());
                    preparedStatement.setString(12, log.getTime_taken());
                },
                // A batch is flushed as soon as ANY of the conditions below is met.
                JdbcExecutionOptions.builder()
                        .withMaxRetries(3)        // retries on insert failure (relies on Flink checkpointing)
                        .withBatchSize(1000)      // max rows per batch; sized for high production traffic
                        .withBatchIntervalMs(500) // minimum interval between batch flushes
                        .build(),
                new JDBCConnectionTools(this.sqltype).GetConnect(user, password, DBip, databaseName)
        );
    }
}
