package Druid;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;

import java.util.*;


/**
 * Flink batch-style streaming job: reads a file of JSON order records, explodes each
 * record's "products" array into one flat JSON document per product (order-level fields
 * merged with product-level fields), and writes the flattened records to a Kafka topic.
 */
public class TransformJson {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        String inputPath ="F:\\大数据学习\\正式班\\第七阶段\\大数据开发正式班第七阶段模块二\\数据\\druid\\作业数据\\lagou_orders.json";
        DataStreamSource<String> data = env.readTextFile(inputPath);

        SingleOutputStreamOperator<String> maped = data.flatMap(new FlatMapFunction<String, String>() {
            /**
             * Flattens one JSON order line: for each element of the "products" array,
             * emits a JSON object containing all order-level fields plus that product's
             * fields. Records without a "products" JSON array emit nothing.
             */
            @Override
            public void flatMap(String s, Collector<String> collector) throws Exception {
                // Order-level fields: every top-level key except "products".
                Map<String, Object> orderFields = new HashMap<String, Object>();
                // One map of fields per element of the "products" array.
                List<Map<String, Object>> products = new ArrayList<Map<String, Object>>();

                JSONObject jsonObject = JSON.parseObject(s);
                for (String key : jsonObject.keySet()) {
                    Object value = jsonObject.get(key);
                    if (key.equals("products")) {
                        // Only flatten when the value really is a JSON array.
                        if (value instanceof JSONArray) {
                            for (Object element : (JSONArray) value) {
                                JSONObject productJson = (JSONObject) element;
                                Map<String, Object> product = new HashMap<String, Object>();
                                for (String productKey : productJson.keySet()) {
                                    product.put(productKey, productJson.get(productKey));
                                }
                                products.add(product);
                            }
                        }
                    } else {
                        orderFields.put(key, value);
                    }
                }

                for (Map<String, Object> product : products) {
                    // BUG FIX: the original reused a single map across iterations without
                    // clearing it, so keys present in an earlier product but missing in a
                    // later one leaked stale values into subsequent records. Build a fresh
                    // merged map per product instead.
                    Map<String, Object> merged = new HashMap<String, Object>(orderFields);
                    merged.putAll(product);
                    String json = JSONObject.toJSONString(merged);
                    System.out.println("json:"+json);
                    collector.collect(json);
                }
            }
        });

        // NOTE(review): the single-String constructor treats "linux121:9092" as the broker
        // list and "druidDemo" as the target topic — confirm against the Flink version in use.
        FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<>(
                "linux121:9092",
                "druidDemo",
                new SimpleStringSchema()
        );

        maped.addSink(kafkaProducer);

        env.execute();
    }

}
