package cn._51doit.day08;

import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import com.mysql.jdbc.Driver;
import java.util.Properties;

/**
 * @create: 2021-10-24 09:51
 * @author: 今晚打脑斧先森
 * @program: JDBCandMysql
 * @Description:
 *   Uses the Flink JDBC sink to write streaming data into MySQL.
 *   1. Connector documentation:
 *      https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/connectors/datastream/jdbc/
 *      (note: the link is for Flink 1.14 while the dependency below is 1.13.2 —
 *      keep the artifact version aligned with the Flink release actually in use)
 *   2. Add the Maven dependency:
 *    <dependency>
 *       <groupId>org.apache.flink</groupId>
 *       <artifactId>flink-connector-jdbc_2.12</artifactId>
 *       <version>1.13.2</version>
 *    </dependency>
 *   3. Call addSink(...) passing the JdbcSink.
 **/
public class JDBCandMysql {

    /**
     * Reads comma-separated {@code "id,name"} lines from a socket source and
     * writes them into the MySQL table {@code books} through Flink's JDBC sink.
     *
     * @param args optional overrides: {@code args[0]} = source host (default
     *             {@code "doit01"}), {@code args[1]} = source port (default 8888)
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // Allow host/port to be supplied on the command line; fall back to the
        // original hard-coded defaults so existing invocations keep working.
        final String host = args.length > 0 ? args[0] : "doit01";
        final int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> lines = env.socketTextStream(host, port);

        // Parse each "id,name" line into (id, name). The length guard turns a
        // malformed line into a descriptive failure instead of an opaque
        // ArrayIndexOutOfBoundsException deep inside the running job.
        SingleOutputStreamOperator<Tuple2<Integer, String>> mapped =
                lines.map(new MapFunction<String, Tuple2<Integer, String>>() {
                    @Override
                    public Tuple2<Integer, String> map(String value) throws Exception {
                        String[] split = value.split(",");
                        if (split.length < 2) {
                            throw new IllegalArgumentException(
                                    "Expected 'id,name' but got: " + value);
                        }
                        return Tuple2.of(Integer.parseInt(split[0]), split[1]);
                    }
                });

        mapped.addSink(JdbcSink.sink(
                "insert into books values(?,?)",
                (ps, t) -> {
                    // Bind the tuple fields to the prepared-statement placeholders.
                    ps.setInt(1, t.f0);
                    ps.setString(2, t.f1);
                },
                JdbcExecutionOptions.builder()      // JDBC execution options
                        .withBatchSize(1000)        // flush after this many buffered rows...
                        .withBatchIntervalMs(200)   // ...or after this many milliseconds
                        .withMaxRetries(5)          // retry count on a failed batch write
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder() // JDBC connection options
                        .withUrl("jdbc:mysql://localhost:3306/db_doit26?characterEncoding=utf8")
                        // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
                        // class name; Connector/J 8.x renamed it to "com.mysql.cj.jdbc.Driver".
                        // Left unchanged here — confirm which driver jar the runtime ships.
                        .withDriverName("com.mysql.jdbc.Driver")
                        .withUsername("root")
                        .withPassword("root") // TODO: externalize credentials (config/env var)
                        .build()));

        env.execute();
    }

}
/**
 *  The (ps, t) -> ... lambda written without a lambda expression. Note the
 *  tuple type must match the stream's actual element type,
 *  Tuple2<Integer, String> (the original snippet incorrectly used
 *  Tuple2<String, String> and setString for the integer id field):
 *                         new JdbcStatementBuilder<Tuple2<Integer, String>>() {
 *                             @Override
 *                             public void accept(PreparedStatement preparedStatement, Tuple2<Integer, String> tp) throws SQLException {
 *                                 // bind parameters
 *                                 preparedStatement.setInt(1, tp.f0);
 *                                 preparedStatement.setString(2, tp.f1);
 *                             }
 *                         }
 */
