package com.atguigu.flink.chapter05.sink;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

/**
 * Demo of a custom Flink sink: word-counts read from a socket stream are
 * written to MySQL via plain JDBC. {@code replace into} keeps exactly one
 * row per word, so each new running count overwrites the previous one.
 */
public class CustomSink {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);
        SingleOutputStreamOperator<Tuple2<String, Long>> resultStream = env
                .socketTextStream("hadoop162", 8888)
                .flatMap(new FlatMapFunction<String, Tuple2<String, Long>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Long>> out) throws Exception {
                        // Split each line on spaces and emit (word, 1) pairs.
                        for (String word : value.split(" ")) {
                            out.collect(Tuple2.of(word, 1L));
                        }
                    }
                })
                .keyBy(t -> t.f0)
                .sum(1);

        resultStream.addSink(new RichSinkFunction<Tuple2<String, Long>>() {
            private Connection conn;
            private PreparedStatement ps;

            @Override
            public void open(Configuration parameters) throws Exception {
                // 1. Load the JDBC driver.
                // BUG FIX: the original used "jdbc.mysql.cj.jdbc.Driver", which is not a
                // real class, so open() always threw ClassNotFoundException and the sink
                // could never start. The MySQL Connector/J driver class is
                // "com.mysql.cj.jdbc.Driver".
                Class.forName("com.mysql.cj.jdbc.Driver");
                // 2. Obtain the connection from the driver manager.
                conn = DriverManager.getConnection("jdbc:mysql://hadoop102:3306/flink?useSSL=false", "root", "000000");
                // Prepare the upsert statement once here instead of re-preparing it for
                // every incoming record in invoke().
                ps = conn.prepareStatement("replace into wc(word,ct) values (?,?)");
            }

            @Override
            public void close() throws Exception {
                // Release JDBC resources when the task shuts down.
                if (ps != null) {
                    ps.close();
                }
                if (conn != null) {
                    conn.close();
                }
            }

            // Parameter 1: the record to sink out.
            @Override
            public void invoke(Tuple2<String, Long> t,
                               Context ctx) throws Exception {
                // Write the (word, count) pair to MySQL via JDBC.
                // Bind the placeholders.
                ps.setString(1, t.f0);
                ps.setLong(2, t.f1);
                // execute() handles DDL and insert/update/delete;
                // executeQuery() would be for SELECTs.
                ps.execute();
            }
        });
        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}


//public class CustomSink {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port", 2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//        SingleOutputStreamOperator<Tuple2<String, Long>> resultStream = env
//                .socketTextStream("hadoop162", 8888)
//                .flatMap(new FlatMapFunction<String, Tuple2<String, Long>>() {
//                    @Override
//                    public void flatMap(String value, Collector<Tuple2<String, Long>> out) throws Exception {
//                        for (String word : value.split(" ")) {
//                            out.collect(Tuple2.of(word, 1L));
//                        }
//                    }
//                })
//                .keyBy(t -> t.f0)
//                .sum(1);
//        resultStream.addSink(new RichSinkFunction<Tuple2<String, Long>>() {
//            private Connection conn;
//            @Override
//            public void open(Configuration parameters) throws Exception {
//                //加载jdbc驱动
//                Class.forName("com.mysql.cj.jdbc.Driver");
//                //通过驱动管理器获取链接对象
//                DriverManager.getConnection("jdbc:mysql://hadoop102:3306/flink?useSSL=false","root","000000");
//            }
//
//            @Override
//            public void close() throws Exception {
//                if (conn != null){
//                    conn.close();
//                }
//            }
//
//            //参数1 ： 要sink出去的数据
//            @Override
//            public void invoke(Tuple2<String, Long> t,
//                               Context ctx) throws Exception {
//                //使用jdbc把数据写入到mysql中
//                //获取预处理语句
//                PreparedStatement ps = conn.prepareStatement("replace into wc(word,ct) values(?,?)");
//                //给占位符赋值
//                ps.setString(1,t.f0);
//                ps.setLong(2,t.f1);
//                //执行预处理语句
//                ps.execute();  //ddl 和 增删改
//                //ps.executeQuery(); //执行查询
//                //关闭预处理语句
//                ps.close();
//            }
//        });
//        try {
//            env.execute();
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//    }
//}


//public class CustomSink {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port", 2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//        SingleOutputStreamOperator<Tuple2<String, Long>> resultStream = env
//                .socketTextStream("hadoop162", 8888)
//                .flatMap(new FlatMapFunction<String, Tuple2<String, Long>>() {
//                    @Override
//                    public void flatMap(String value, Collector<Tuple2<String, Long>> out) throws Exception {
//                        for (String word : value.split(" ")) {
//                            out.collect(Tuple2.of(word, 1L));
//                        }
//                    }
//                })
//                .keyBy(t -> t.f0)
//                .sum(1);
//        resultStream.addSink(new RichSinkFunction<Tuple2<String, Long>>() {
//            private Connection conn;
//            @Override
//            public void open(Configuration parameters) throws Exception {
//                //加载jdbc驱动
//                Class.forName("com.mysql.cj.jdbc.Driver");
//                //通过驱动管理器获取链接对象
//                conn = DriverManager.getConnection("jdbc:mysql://hadoop102:3306/flink?useSSL=false","root","000000");
//            }
//
//            @Override
//            public void close() throws Exception {
//                if (conn != null) {
//                    conn.close();
//                }
//            }
//
//            //参数1 ： 要sink 出去的数据
//            @Override
//            public void invoke(Tuple2<String, Long> t,
//                               Context ctx) throws Exception {
//                //使用jdbc把数据写出到mysql中
//                //获取预处理语句
//                PreparedStatement ps = conn.prepareStatement("replace into wc(word,ct) values (?,?)");
//                //给占位符赋值
//                ps.setString(1,t.f0);
//                ps.setLong(2,t.f1);
//                //执行预处理语句
//                ps.execute();  //ddl和增删改
//                ps.executeQuery(); //执行查询
//
//                //关闭预处理语句
//                ps.close();
//            }
//        });
//        try {
//            env.execute();
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//    }
//}