package net.bwie.realtime.dws.logs.job;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import net.bwie.realtime.dwd.douyin.logs.bean.TransactionAccumulator;
import net.bwie.realtime.jtp.utils.DateTimeUtil;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import net.bwie.realtime.jtp.utils.KafkaUtils;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;

/**
 * DWS-layer Flink job: per live room, counts successful-payment orders and
 * distinct paying users.
 *
 * <p>Pipeline: Kafka topic {@code dws_event_pay_transaction} (joined event/pay
 * JSON records) -> filter to records whose payment succeeded -> extract
 * (liveRoomId, userId, orderId, eventTime) -> key by liveRoomId -> accumulate
 * counts in keyed {@link ValueState} -> emit the accumulator as JSON to Kafka
 * topic {@code dws_transaction_count}.
 */
public class DwsTransactionCount {

    public static void main(String[] args) throws Exception {
        // 1. Execution environment. Parallelism 1 keeps ordering simple for this job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Source: joined event/pay records from the DWS input topic.
        DataStream<String> kafkaDataStream = KafkaUtil.consumerKafka(env, "dws_event_pay_transaction");

        // 3. Transformation: filter, key, and accumulate per live room.
        DataStream<String> pageStream = handle(kafkaDataStream);
        pageStream.print();

        // 4. Sink: publish running statistics back to Kafka.
        // NOTE(review): source uses KafkaUtil but sink uses KafkaUtils — presumably
        // two coexisting project helpers; confirm this is intentional.
        KafkaUtils.producerKafka(pageStream, "dws_transaction_count");

        // 5. Trigger execution.
        env.execute("EventPayAssociation");
    }

    /**
     * Builds the transformation: keep successful payments, extract the fields of
     * interest, and maintain a per-live-room {@link TransactionAccumulator} in
     * keyed state, emitting its JSON snapshot after every update.
     *
     * @param stream raw JSON records from Kafka
     * @return stream of JSON-serialized {@link TransactionAccumulator} snapshots
     */
    private static DataStream<String> handle(DataStream<String> stream) {
        // Keep only records whose payment succeeded. Kafka payloads are untrusted:
        // guard against missing "pay_info"/"payStatus" so a malformed record is
        // dropped instead of throwing an NPE and failing the job.
        SingleOutputStreamOperator<String> filter = stream.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String s) throws Exception {
                JSONObject jsonObject = JSON.parseObject(s);
                if (jsonObject == null) {
                    return false;
                }
                JSONObject payInfo = jsonObject.getJSONObject("pay_info");
                if (payInfo == null) {
                    return false;
                }
                // Constant-first equals is null-safe when "payStatus" is absent.
                return "success".equals(payInfo.getString("payStatus"));
            }
        });

        // Extract (liveRoomId, userId, orderId, eventTimestamp) from each record.
        DataStream<Tuple4<String, String, String, Long>> userDataStream = filter
                .map(new MapFunction<String, Tuple4<String, String, String, Long>>() {
                    @Override
                    public Tuple4<String, String, String, Long> map(String value) throws Exception {
                        JSONObject json = JSON.parseObject(value);
                        JSONObject payInfo = json.getJSONObject("pay_info");
                        JSONObject eventInfo = json.getJSONObject("event_info");
                        String liveRoomId = eventInfo.getString("liveRoomId");
                        // Assumes "userId" is present in event_info — TODO confirm upstream schema.
                        String userId = eventInfo.getString("userId");
                        // Assumes "orderId" is present in pay_info — TODO confirm upstream schema.
                        String orderId = payInfo.getString("orderId");

                        // "enterTime" is parsed as the event timestamp.
                        String enterTime = eventInfo.getString("enterTime");
                        Long timestamp = DateTimeUtil.convertStringToLong(enterTime, "yyyy-MM-dd HH:mm:ss");

                        return new Tuple4<>(liveRoomId, userId, orderId, timestamp);
                    }
                });

        // Partition by live-room id (tuple field f0) so state is scoped per room.
        KeyedStream<Tuple4<String, String, String, Long>, String> keyedByLiveRoom = userDataStream
                .keyBy(tuple -> tuple.f0);

        // Accumulate order count / distinct-user count per live room in keyed state,
        // emitting the updated snapshot for every incoming order.
        DataStream<String> resultStream = keyedByLiveRoom.process(new KeyedProcessFunction<String,
                Tuple4<String, String, String, Long>, String>() {

            // Per-key running statistics for the live room.
            private transient ValueState<TransactionAccumulator> statsState;
            // One formatter per operator instance; SimpleDateFormat is not thread-safe,
            // but Flink invokes processElement single-threaded per subtask, so this is safe.
            private transient SimpleDateFormat sdf;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<TransactionAccumulator> descriptor = new ValueStateDescriptor<>(
                        "liveRoomStats",
                        TypeInformation.of(TransactionAccumulator.class)
                );
                statsState = getRuntimeContext().getState(descriptor);
                sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            }

            @Override
            public void processElement(
                    Tuple4<String, String, String, Long> value,
                    KeyedProcessFunction<String, Tuple4<String, String, String, Long>, String>.Context ctx,
                    Collector<String> out) throws Exception {

                String liveRoomId = value.f0;
                String userId = value.f1;
                String orderId = value.f2;
                Long timestamp = value.f3;

                // Lazily create the accumulator on the first order for this room.
                TransactionAccumulator currentStats = statsState.value();
                if (currentStats == null) {
                    currentStats = new TransactionAccumulator(liveRoomId, sdf);
                }

                // Fold this order into the running statistics.
                currentStats.addOrder(orderId, userId, timestamp);

                // Persist the updated accumulator back into keyed state.
                statsState.update(currentStats);

                // Emit the current snapshot downstream.
                out.collect(JSON.toJSONString(currentStats));
            }
        });

        return resultStream;
    }
}
