package kk.learn.flink.work._2;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;

import java.util.Properties;

/**
 * <p>
 * Flink streaming job that reads raw JSON events from the Kafka topic
 * {@code kylin_origin}, flattens each record's {@code products} array into one
 * record per product, and writes the cleaned records to {@code kylin_cleaned}.
 * </p>
 *
 * @author KK
 * @since 2021-05-05
 */
public class CleanData {

    public static void main(String[] args) throws Exception {
        //从Kafka读取数据;
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "linux121:9092");
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<String>("kylin_origin", new SimpleStringSchema(), properties);
        DataStreamSource<String> data = env.addSource(consumer);

        //转换;
        final SingleOutputStreamOperator<String> result = data.flatMap(new FlatMapFunction<String, String>() {
            public void flatMap(String s, Collector<String> collector) throws Exception {
                // todo 解析成过个json字符串
                final JSONObject jsonObject = JSON.parseObject(s);
                final JSONArray products = jsonObject.getJSONArray("products");
                jsonObject.remove("products");
                for (Object product : products) {
                    JSONObject record = new JSONObject();
                    record.putAll(jsonObject);
                    record.put("product", product);
                    String str = JSON.toJSONString(record, false);
                    collector.collect(str);
                }
            }
        });


        //然后输出到另外一个kafka主题;
        String brokerList = "linux121:9092";
        String topic = "kylin_cleaned";
        FlinkKafkaProducer<String> producer = new FlinkKafkaProducer<>(brokerList, topic, new SimpleStringSchema());
        result.addSink(producer);
        env.execute();
    }
}
