import com.asap.demo.model.RuleBean;
import com.asap.demo.model.StandardEvent;
import com.asap.demo.sourcefunc.MysqlSourceFunction;
import com.asap.demo.sourcefunc.MysqlSourceFunction2;
import com.asap.demo.utils.Constants;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.RichWindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousEventTimeTrigger;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.test.util.MiniClusterWithClientResource;
import org.apache.flink.util.Collector;
import org.junit.ClassRule;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * @author wangbh
 * @Description: test
 * @date 2021/10/16 14:14
 */
public class mysqlSourceFunction {
    // NOTE(review): class names should be UpperCamelCase (e.g. MysqlSourceFunctionTest),
    // but renaming would change the public name, so it is left unchanged here.

    // Fixed: logger was mistakenly bound to FlinkCepSqlTest.class (copy-paste bug),
    // which mislabels every log line emitted by this test.
    private static final Logger logger = LoggerFactory.getLogger(mysqlSourceFunction.class);

    /** Shared mini Flink cluster for the test: 2 TaskManagers x 3 slots = 6 slots. */
    @ClassRule
    public static MiniClusterWithClientResource flinkCluster =
            new MiniClusterWithClientResource(
                    new MiniClusterResourceConfiguration.Builder()
                            .setNumberSlotsPerTaskManager(3)
                            .setNumberTaskManagers(2)
                            .build());

    /**
     * End-to-end smoke test: broadcasts rule rows produced by {@link MysqlSourceFunction2},
     * consumes events from Kafka, filters FW/WAF events, windows them with an event-time
     * trigger, and connects the raw stream with the broadcast rule state.
     *
     * @throws Exception if the Flink job fails. Exceptions are now rethrown after logging —
     *                   the original swallowed them, so the test could never fail.
     */
    @Test
    public void test() throws Exception {

        try {
            Map<String, String> configMap = new HashMap<String, String>();
            configMap.put(Constants.DB_JDBC_USER, "root");
            // SECURITY(review): hard-coded database credentials in source control —
            // move to an external config/secret store.
            configMap.put(Constants.DB_JDBC_PASSWD, "1qazXSW@3edc");
            configMap.put(Constants.DB_JDBC_URL, "jdbc:mysql://192.168.1.238:3306/SSA?useUnicode=true&characterEncoding=utf-8");
            configMap.put(Constants.DB_JDBC_DRIVER, "com.mysql.jdbc.Driver");
            configMap.put(Constants.INITAL_POOL_SIZE, "10");
            configMap.put(Constants.MIN_POOL_SIZE, "5");
            configMap.put(Constants.MAX_STATE_ELEMENTS, "100");
            // Fixed: MAX_IDLE_TIME was put twice ("50" then "60"); the second write won,
            // so only the effective value is kept. TODO(review): confirm whether the first
            // put was meant to be a different constant (e.g. a max pool size).
            configMap.put(Constants.MAX_IDLE_TIME, "60");
            // NOTE(review): configMap is built but never referenced below — presumably
            // consumed by the source functions in an earlier revision; confirm it is
            // still needed.

            final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.setParallelism(1);
            env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
            // Exactly-once checkpoints every 3 minutes.
            env.enableCheckpointing(Time.minutes(3).toMilliseconds(), CheckpointingMode.EXACTLY_ONCE);

            // Broadcast state descriptor shared by the broadcast stream and the
            // BroadcastProcessFunction below (keys = rule ids, values = rule definitions —
            // TODO confirm key semantics against MysqlSourceFunction2).
            MapStateDescriptor<String, RuleBean> broadcastVar = new MapStateDescriptor<>("rule_broadcast_var", String.class, RuleBean.class);

            // Rule stream: read from MySQL and broadcast to all downstream subtasks.
            BroadcastStream<RuleBean> broadcastStream = env.addSource(new MysqlSourceFunction2())
                    .name("mysql source")
                    .setParallelism(1)
                    .broadcast(broadcastVar);

            // Kafka event stream.
            Properties browseProperties = new Properties();
            browseProperties.put("bootstrap.servers", "192.168.1.25:9092");
            browseProperties.put("group.id", "temporal");
            browseProperties.put("auto.offset.reset", "latest");
            DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer<String>(
                    "flink_pressure_test",
                    new SimpleStringSchema(),
                    browseProperties
            ));
            dataStreamSource.print("====>kafka");

            dataStreamSource
                    .map(StandardEvent::parse)
                    .filter(new RichFilterFunction<StandardEvent>() {
                        /** Keep only firewall (FW) and WAF events. */
                        @Override
                        public boolean filter(StandardEvent event) throws Exception {
                            logger.info("====>filter");
                            String deviceParentType = event.getField("DEVICE_PARENT_TYPE");
                            // Fixed: constant-first equals avoids an NPE when the
                            // DEVICE_PARENT_TYPE field is absent.
                            return "FW".equals(deviceParentType) || "WAF".equals(deviceParentType);
                        }
                    })
                    // Event time from CREATE_TIME, tolerating 10s of out-of-orderness.
                    .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<StandardEvent>(Time.seconds(10)) {
                        @Override
                        public long extractTimestamp(StandardEvent event) {
                            logger.info("====>extract");
                            String createTime = event.getField("CREATE_TIME");
                            return Timestamp.valueOf(createTime).getTime();
                        }
                    })
                    .keyBy(new KeySelector<StandardEvent, Object>() {
                        @Override
                        public Object getKey(StandardEvent event) throws Exception {
                            logger.info("====>keyBy");
                            // NOTE(review): returns the literal "SRC_PORT", so every
                            // event lands on ONE key (no parallelism, single window).
                            // Possibly intended event.getField("SRC_PORT") — confirm
                            // before changing, behavior kept as-is.
                            return "SRC_PORT";
                        }
                    })
                    // 1-minute tumbling event-time windows, firing every 10s.
                    .timeWindow(Time.minutes(1))
                    .trigger(ContinuousEventTimeTrigger.of(Time.seconds(10)))
                    .apply(new RichWindowFunction<StandardEvent, String, Object, TimeWindow>() {
                        /** Placeholder: window contents are currently discarded. */
                        @Override
                        public void apply(Object o, TimeWindow window, Iterable<StandardEvent> input, Collector<String> out) throws Exception {
                            // intentionally empty — aggregation logic not implemented yet
                        }
                    }).print();

            // Join raw events with the broadcast rule state; currently just logs the
            // rules seen and forwards the event unchanged.
            SingleOutputStreamOperator<Object> processStream = dataStreamSource.connect(broadcastStream)
                    .process(new BroadcastProcessFunction<String, RuleBean, Object>() {
                        @Override
                        public void processElement(String value, ReadOnlyContext ctx, Collector<Object> out) throws Exception {
                            ReadOnlyBroadcastState<String, RuleBean> broadcastState = ctx.getBroadcastState(broadcastVar);
                            for (Map.Entry<String, RuleBean> entry : broadcastState.immutableEntries()) {
                                logger.info("Entry key:{}, entry value:{}", entry.getKey(), entry.getValue());
                                System.out.println(entry.getKey() + "::::::" + entry.getValue());
                            }
                            out.collect(value);
                        }

                        /**
                         * Placeholder: broadcast elements are not written into the
                         * broadcast state here, so processElement above will always
                         * iterate an empty state. TODO(review): confirm intent.
                         */
                        @Override
                        public void processBroadcastElement(RuleBean value, Context ctx, Collector<Object> out) throws Exception {
                            // intentionally empty
                        }

                        @Override
                        public void open(Configuration parameters) throws Exception {
                            super.open(parameters);
                        }
                    });
            processStream.print();
            env.execute("rule demo");
        } catch (IOException e) {
            // Fixed: log with the throwable (preserves stack trace in the log) and
            // rethrow so the test actually fails instead of silently passing.
            logger.error("=======properties not found=======", e);
            throw e;
        } catch (Exception e) {
            logger.error("=======execute error=======", e);
            throw e;
        }

    }
}
