package cn.doitedu.api;

import beans.ProductAmount;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;

/**
 * @Author: 深似海
 * @Site: <a href="www.51doit.com">多易教育</a>
 * @QQ: 657270652
 * @Date: 2024/2/21
 * @Desc: 学大数据，上多易教育
 * <p>
 * 请从 kafka 中读取如下 用户行为记录数据
 * {"uid":1,"event_id":"add_cart","timestamp":1708507374000,"properties":{"product_id":102,"quantity":2,"product_price":18.8}}
 * {"uid":1,"event_id":"add_cart","timestamp":1708507375000,"properties":{"product_id":103,"quantity":1,"product_price":188}}
 * {"uid":1,"event_id":"add_cart","timestamp":1708507376000,"properties":{"product_id":102,"quantity":1,"product_price":18.8}}
 * {"uid":1,"event_id":"item_share","timestamp":1708507377000,"properties":{"item_id":"item001","share_method":"qq"}}
 * <p>
 * 计算每个用户，加购物车的每种商品的金额总和
 * 结果形式：    谁？ 哪个商品？ 总金额？
 **/
public class _04_KafkaSource_Exercise {

    /**
     * Flink streaming job: reads user-behavior events (JSON strings) from the Kafka
     * topic {@code doit46}, keeps only {@code add_cart} events, and prints a running
     * per-(user, product) sum of cart amounts ({@code quantity * product_price}).
     *
     * @param args unused
     * @throws Exception propagated from {@link StreamExecutionEnvironment#execute()}
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Build the KafkaSource: resume from committed offsets, falling back to LATEST
        // when the group "g2" has no committed offset yet.
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers("doitedu:9092")
                .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
                .setGroupId("g2")
                .setTopics("doit46")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        // Attach the source to the environment; no watermarks needed (processing-time job).
        DataStreamSource<String> stream = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "deepSea");

        // Keep only "add_cart" events.
        // Constant-first equals() is null-safe: a record without "event_id" (or with a
        // null value) must be dropped, not crash the job with an NPE.
        SingleOutputStreamOperator<String> filtered = stream.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String json) throws Exception {
                JSONObject jsonObject = JSON.parseObject(json);
                return jsonObject != null && "add_cart".equals(jsonObject.getString("event_id"));
            }
        });

        // Parse each surviving record into a ProductAmount bean:
        // amount = quantity * product_price, taken from the nested "properties" object.
        SingleOutputStreamOperator<ProductAmount> beanStream = filtered.map(new MapFunction<String, ProductAmount>() {
            @Override
            public ProductAmount map(String json) throws Exception {
                JSONObject jsonObject = JSON.parseObject(json);
                int uid = jsonObject.getIntValue("uid");

                // Guard against a missing "properties" object so a malformed event
                // yields a zero-amount bean instead of an NPE that fails the task.
                JSONObject properties = jsonObject.getJSONObject("properties");
                if (properties == null) {
                    return new ProductAmount(uid, 0, 0.0);
                }
                int productId = properties.getIntValue("product_id");
                int quantity = properties.getIntValue("quantity");
                double productPrice = properties.getDoubleValue("product_price");

                return new ProductAmount(uid, productId, quantity * productPrice);
            }
        });

        // Key by (uid, product_id) and keep a running sum of the "amount" field,
        // printing each updated total. Anonymous KeySelector (not a lambda) so Flink
        // can extract the Tuple2 key type despite erasure.
        beanStream.keyBy(new KeySelector<ProductAmount, Tuple2<Integer, Integer>>() {
                    @Override
                    public Tuple2<Integer, Integer> getKey(ProductAmount bean) throws Exception {
                        return Tuple2.of(bean.getUid(), bean.getProduct_id());
                    }
                })
                .sum("amount")
                .print();

        env.execute();
    }

}
