package day9;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.util.Properties;

import static org.apache.flink.table.api.Expressions.$;

public class Test4_kafka_to_mysql {
    public static void main(String[] args) throws Exception {
        // Read CSV records from Kafka, filter them with Flink SQL, and print the result.
        // NOTE(review): the class name says "kafka_to_mysql" but no MySQL/JDBC sink is
        // wired up here — the filtered rows are only printed. Confirm whether a JDBC
        // sink was intended before relying on the name.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Kafka consumer configuration: broker address and consumer group.
        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", "hadoop10:9092");
        prop.setProperty("group.id", "x1");

        // Fix: use parameterized types throughout. The original declared raw
        // FlinkKafkaConsumer / DataStreamSource / SingleOutputStreamOperator,
        // which suppresses compile-time generic checking and produces unchecked warnings.
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("topic1", new SimpleStringSchema(), prop);
        DataStreamSource<String> ds1 = env.addSource(consumer);

        // Parse each line "username,age,status" into a typed Tuple3.
        // NOTE(review): a malformed line (missing fields or non-numeric age) will fail
        // the job with ArrayIndexOutOfBounds/NumberFormatException — confirm input is trusted.
        SingleOutputStreamOperator<Tuple3<String, Integer, String>> ds2 =
                ds1.map(new MapFunction<String, Tuple3<String, Integer, String>>() {
                    @Override
                    public Tuple3<String, Integer, String> map(String value) throws Exception {
                        String[] arr = value.split(",");
                        return Tuple3.of(arr[0], Integer.parseInt(arr[1]), arr[2]);
                    }
                });

        // Register the stream as a table with named columns, then filter via SQL.
        tenv.createTemporaryView("table1", ds2, $("username"), $("age"), $("status"));
        Table table = tenv.sqlQuery("select * from table1 where status = 'success'");

        // Convert back to a DataStream so the result can be printed. toRetractStream
        // pairs each row with a Boolean flag: true = insert, false = retraction.
        DataStream<Tuple2<Boolean, Tuple3<String, Integer, String>>> ds3 =
                tenv.toRetractStream(table, TypeInformation.of(
                        new TypeHint<Tuple3<String, Integer, String>>() {
                        }));

        ds3.print();

        env.execute();
    }
}
