package com.lagou;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

/**
 * Flink streaming job: reads raw order JSON from the Kafka topic
 * {@code druiddata}, flattens each order's {@code products} array into one
 * JSON record per product (copying the order-level fields onto each), and
 * writes the newline-separated results to the Kafka topic
 * {@code druiddata_write4}.
 */
public class SourceFromKafka {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        String topic = "druiddata";
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "zb26105:9092");
        props.setProperty("group.id", "mygp");

        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<String>(topic, new SimpleStringSchema(), props);
        // BUG FIX: the original called setStartFromGroupOffsets() and then
        // immediately setStartFromEarliest(); the later call silently
        // overrides the earlier one, so only the effective setting is kept.
        consumer.setStartFromEarliest();

        DataStreamSource<String> data = env.addSource(consumer);
        System.out.println("...parallelism" + data.getParallelism());

        // One input order fans out into several product records, joined by
        // newlines into a single string (the sink schema is line-oriented).
        SingleOutputStreamOperator<String> maped = data.map(new MapFunction<String, String>() {
            @Override
            public String map(String value) throws Exception {
                return flattenOrder(value);
            }
        });

        maped.addSink(new FlinkKafkaProducer<String>(
                "druiddata_write4", new SimpleStringSchema(), props)
        ).name("flink-connectors-kafka")
                .setParallelism(1);

        env.execute("flink learning connectors kafka");
    }

    /**
     * Flattens one order JSON string into newline-separated JSON records,
     * one per element of the order's {@code products} array. Order-level
     * fields (ts, orderId, userId, orderStatusId, orderStatus, payModeId,
     * payMode, payment) are copied onto every product record.
     *
     * @param value raw order JSON consumed from Kafka
     * @return newline-separated product-level JSON records; an empty string
     *         when the input cannot be parsed or contains no products
     */
    private static String flattenOrder(String value) {
        // One record becomes multiple records, so results are accumulated in
        // a StringBuilder rather than returned directly. (StringBuilder, not
        // StringBuffer: no cross-thread sharing here, so no need to pay for
        // synchronization.)
        StringBuilder out = new StringBuilder();
        try {
            JSONObject argsJson = JSONObject.parseObject(value);

            Long ts = argsJson.getLong("ts");
            String orderId = argsJson.getString("orderId");
            String userId = argsJson.getString("userId");
            String orderStatusId = argsJson.getString("orderStatusId");
            String orderStatus = argsJson.getString("orderStatus");
            Integer payModeId = argsJson.getInteger("payModeId");
            String payMode = argsJson.getString("payMode");
            String payment = argsJson.getString("payment");

            JSONArray products = argsJson.getJSONArray("products");
            // Guard against records with no "products" key — the original
            // would NPE into the catch block and drop the whole record anyway,
            // but an explicit check makes the intent clear.
            if (products != null) {
                for (Object product : products) {
                    JSONObject newJson = new JSONObject();
                    newJson.put("ts", ts);
                    newJson.put("orderId", orderId);
                    newJson.put("userId", userId);
                    newJson.put("orderStatusId", orderStatusId);
                    newJson.put("orderStatus", orderStatus);
                    newJson.put("payModeId", payModeId);
                    newJson.put("payMode", payMode);
                    newJson.put("payment", payment);
                    newJson.put("product", product);
                    out.append(newJson.toString()).append('\n');
                }
            }
        } catch (Exception e) {
            // NOTE(review): malformed records are logged and effectively
            // dropped (an empty string is still emitted downstream, matching
            // the original behavior). Consider routing failures to a side
            // output / dead-letter topic and using a real logger instead of
            // printStackTrace.
            e.printStackTrace();
        }
        return out.toString();
    }
}
