package com.atguigu.gmall.realtime.tutorial.sql;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.tutorial.beans.Sensor;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import static org.apache.flink.table.api.Expressions.$;

public class SQLTEST_Query {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //CK设置

        //TODO 1 定义Table流环境
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env,settings);


        String topic = "sensor";
        String groupId = "sensor_query";

        FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtil.getKafkaSource("localhost:9092",topic,groupId);
        DataStreamSource<String> jsonDStream = env.addSource(kafkaSource);

        DataStream<Sensor> jsonStream = jsonDStream.map(info->{
            String[] split = info.split(",");
            return new Sensor(split[0],Long.valueOf(split[1]),Double.valueOf(split[2]));
        });

        Table table = tableEnv.fromDataStream(jsonStream);
        Table sqlTable  = table.where($("temperature").isGreater(38.0));

        DataStream<Sensor> sensorDataStream = tableEnv.toAppendStream(sqlTable, Sensor.class);
        sensorDataStream.print();

        env.execute();


    }
}
