package com.hjc.demo.flink.kafka.consumer;

import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

/**
 * Demo Flink job: generates 100 synthetic JSON records ({@code src_ip}/{@code dst_ip})
 * from an in-memory source and publishes them as strings to the Kafka topic
 * configured in {@code CommonProperties}.
 */
public class FlinkKafkaProducerDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka sink (not a source, despite the original name): serializes each
        // record with SimpleStringSchema and writes it to CommonProperties.TOPIC.
        SinkFunction<String> kafkaSink = new FlinkKafkaProducer<>(
                CommonProperties.brokerList, CommonProperties.TOPIC, new SimpleStringSchema());

        environment.addSource(new SourceFunction<String>() {
            // SourceFunction contract: cancel() must cause run() to return.
            // volatile because cancel() is invoked from a different thread.
            private volatile boolean running = true;

            @Override
            public void run(SourceContext<String> ctx) throws Exception {
                // Emit up to 100 synthetic flow records as JSON strings,
                // stopping early if the job is cancelled.
                for (int i = 0; i < 100 && running; i++) {
                    JSONObject jsonObject = new JSONObject();
                    jsonObject.put("src_ip", "1.12.3." + (i + 1));
                    jsonObject.put("dst_ip", "12.3.4.166");
                    ctx.collect(jsonObject.toJSONString());
                }
            }

            @Override
            public void cancel() {
                running = false;
            }
        }).addSink(kafkaSink);

        environment.execute("kafka-producer");
    }
}
