import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.file.sink.FileSink;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class Demo02_sink {

    /**
     * Sinks the stream to row-format files under {@code E://result}.
     *
     * <p>Each record is written as one line via {@link SimpleStringEncoder}.
     *
     * @param resultDataStream stream of result strings to write
     */
    public static void sink_file(DataStream<String> resultDataStream){

        // Typed FileSink<String> — the original used a raw FileSink, which
        // compiled only with an unchecked warning at sinkTo().
        FileSink<String> fileSink = FileSink
                .forRowFormat(
                        new Path("E://result"),
                        new SimpleStringEncoder<String>()
                )
                .build();

        resultDataStream.sinkTo(fileSink);
    }

    /**
     * Sinks the stream to a TCP socket on node101:8888, serializing each
     * record with {@link SimpleStringSchema}.
     *
     * @param resultDataStream stream of result strings to write
     */
    public static void sink_socket(DataStream<String> resultDataStream){
        resultDataStream.writeToSocket("node101",8888,new SimpleStringSchema());
    }

    /**
     * Sinks the stream to the Kafka topic {@code supermarket}.
     *
     * <p>NOTE(review): this broker-list constructor of FlinkKafkaProducer is
     * deprecated in newer Flink releases in favor of the Properties-based
     * constructors / KafkaSink; kept as-is to preserve behavior.
     *
     * @param resultDataStream stream of result strings to write
     */
    public static void sink_kafka(DataStream<String> resultDataStream){
        resultDataStream.addSink(
            new FlinkKafkaProducer<String>(
                  "node101:9092,node102:9092,node103:9092,node104:9092",
                  "supermarket",
                  new SimpleStringSchema()
            )
        );
    }

    /**
     * Custom sink: parses each CSV line ({@code id,timestamp,temperature})
     * into a {@code Demo02_sensorReading} and upserts it into the MySQL
     * table {@code sensor} (update first, insert if no row matched).
     *
     * @param resultDataStream stream of CSV sensor lines to persist
     */
    public static void sink_mysql(DataStream<String> resultDataStream){
        resultDataStream
            .map(sensorStr->{
                String[] fields = sensorStr.split(",");
                return new Demo02_sensorReading(
                        fields[0],
                        Long.parseLong(fields[1]),
                        Double.parseDouble(fields[2])
                );
            })
            .addSink(
                new RichSinkFunction<Demo02_sensorReading>() {

                    // JDBC resources live for the lifetime of the sink task:
                    // opened once in open(), released in close().
                    Connection conn = null;
                    PreparedStatement ps_insert = null;
                    PreparedStatement ps_update = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        // NOTE(review): "com.mysql.jdbc.Driver" is the legacy
                        // Connector/J 5.x class; Connector/J 8+ uses
                        // "com.mysql.cj.jdbc.Driver". With any JDBC 4+ driver,
                        // Class.forName is unnecessary (ServiceLoader-based
                        // auto-registration) — confirm against the jar in use.
                        Class.forName("com.mysql.jdbc.Driver");
                        // NOTE(review): hard-coded credentials — move to
                        // configuration before production use.
                        conn = DriverManager.getConnection(
                                "jdbc:mysql://localhost:3306/flink",
                                "root",
                                "root"
                        );
                        // Parameterized statements (no string-built SQL).
                        ps_insert = conn.prepareStatement("insert into sensor values(?,?,?)");
                        ps_update = conn.prepareStatement("update sensor set ts=?,temp=? where id=?");
                    }

                    @Override
                    public void invoke(Demo02_sensorReading value, Context context) throws Exception {

                        // Upsert: try UPDATE by sensor id first ...
                        ps_update.setLong(1,value.getTimestamp());
                        ps_update.setDouble(2,value.getTemperature());
                        ps_update.setString(3,value.getSensorId());

                        int updateCount = ps_update.executeUpdate();

                        // ... and fall back to INSERT when no row matched.
                        if(updateCount==0){
                            ps_insert.setString(1,value.getSensorId());
                            ps_insert.setLong(2,value.getTimestamp());
                            ps_insert.setDouble(3,value.getTemperature());

                            int insertCount = ps_insert.executeUpdate();

                            if(insertCount==1){
                                System.out.println("添加成功");
                            }else{
                                System.out.println("修改或添加失败");
                            }
                        }else if(updateCount==1){
                            System.out.println("修改成功");
                        }
                    }

                    @Override
                    public void close() throws Exception {
                        // Release statements before the connection; each is
                        // null-guarded in case open() failed part-way through.
                        if(ps_update!=null){
                            ps_update.close();
                        }
                        if(ps_insert!=null){
                            ps_insert.close();
                        }
                        if(conn!=null){
                            conn.close();
                        }
                    }
                }
            );
    }

    /**
     * Entry point: reads raw lines from a socket source on port 9999 and
     * routes them to the MySQL sink (other sinks left commented for demo use).
     */
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Receive the raw data from the socket source helper.
        DataStreamSource<String> dataStream = Demo02_source.source_socket(env,9999);


//        sink_file(dataStream);
//        sink_socket(dataStream);
//        sink_kafka(dataStream);
        sink_mysql(dataStream);

        try {
            env.execute("sink");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
