package com.zhangyang.flink.cdc;

import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;

/**
 * Demo job: generates synthetic event maps on a timer and writes them to a
 * Kafka topic via the Flink {@code KafkaSink} with at-least-once delivery.
 *
 * @author Dell
 * @date 2023/2/2
 */
public class KafkaSinkFormatMain {

    /**
     * Builds a Kafka sink that writes generated event maps to the test topic,
     * wires it to the synthetic {@link DataSource}, and executes the job.
     *
     * @param args unused command-line arguments
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<Map<String, Object>> mapDataStreamSource = env.addSource(new DataSource());

        // NOTE(review): MapDeserializationSchema is used as the *value serialization*
        // schema here; the class name suggests the opposite direction — confirm intent.
        KafkaSink<Map<String, Object>> sink = KafkaSink.<Map<String, Object>>builder()
                .setBootstrapServers("10.215.5.18:9092")
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic("soc-event-topic-test")
                        .setValueSerializationSchema(new MapDeserializationSchema())
                        .build()
                )
                .setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                .build();

        mapDataStreamSource.sinkTo(sink).name("输出");
        env.execute("制造数据");
    }

    /**
     * Synthetic source that emits four fixed event maps every 5 seconds,
     * each stamped with an event-time timestamp derived from the wall clock.
     */
    private static class DataSource extends RichParallelSourceFunction<Map<String, Object>> {
        // volatile so a cancel() issued from another thread is visible to run()
        private volatile boolean running = true;

        @Override
        public void run(SourceContext<Map<String, Object>> ctx) throws Exception {
            while (running) {
                long now = System.currentTimeMillis();
                emit(ctx, "dns", "baidu", "555", now);
                emit(ctx, "dns", "baidu", "444", now + 1);
                emit(ctx, "Windows", "RRS", "333", now + 2);
                emit(ctx, "Windows", "RRS", "333", now + 3);
                Thread.sleep(5000);
            }
        }

        /** Builds one event map and emits it with {@code time} as its event timestamp. */
        private static void emit(SourceContext<Map<String, Object>> ctx,
                                 String pro, String domain, String port, long time) {
            Map<String, Object> event = new HashMap<>();
            event.put("pro", pro);
            event.put("domain", domain);
            event.put("port", port);
            event.put("ip", "1.1.1.1");
            event.put("time", time);
            ctx.collectWithTimestamp(event, time);
        }

        @Override
        public void cancel() {
            // BUG FIX: this was previously empty, so the running flag never
            // flipped and the source could not be stopped cooperatively.
            running = false;
        }
    }

}
