package com.atguigu.gmall.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.func.DimAsyncFunctionNew1;
import com.atguigu.gmall.realtime.bean.OrderDetail;
import com.atguigu.gmall.realtime.bean.OrderInfo;
import com.atguigu.gmall.realtime.bean.OrderProtalBean;
import com.atguigu.gmall.realtime.bean.OrderWide;
import com.atguigu.gmall.realtime.common.GmallConfig;
import com.atguigu.gmall.realtime.utils.ClickHouseUtil;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

/**
 * DWM-layer job: joins the order-info and order-detail DWD Kafka topics into an
 * "order wide" record, enriches it asynchronously with dimension data (user,
 * province, SKU, SPU, category3, trademark), then sinks the result both to the
 * DWM Kafka topic and to a ClickHouse portrait table.
 *
 * <p>Event time is taken from each record's {@code create_time}
 * ("yyyy-MM-dd HH:mm:ss"), with a 3-second bounded out-of-orderness watermark.
 */
public class orderdd {
    public static void main(String[] args) throws Exception {
        // Stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism should match the Kafka partition count of the source topics.
        env.setParallelism(4);

        // Source/sink topics and the consumer group.
        String orderInfoSourceTopic = "dwd_order_info";
        String orderDetailSourceTopic = "dwd_order_detail";
        String orderWideSinkTopic = "dwm_order_wide";
        String groupId = "order_wide_group";

        // Read the order-info topic.
        FlinkKafkaConsumer<String> orderInfoSource = MyKafkaUtil.getKafkaSource(orderInfoSourceTopic, groupId);
        DataStreamSource<String> orderInfoJsonStrDS = env.addSource(orderInfoSource);

        // FIX: the order-detail stream previously re-read the order-info topic
        // (only one consumer was created), so OrderDetail records were parsed
        // from order-info JSON and the join degenerated into a self-join.
        // Each topic now gets its own consumer.
        FlinkKafkaConsumer<String> orderDetailSource = MyKafkaUtil.getKafkaSource(orderDetailSourceTopic, groupId);
        DataStreamSource<String> orderDetailJsonStrDS = env.addSource(orderDetailSource);

        // Parse order-info JSON and derive the epoch-millis event timestamp
        // from create_time. SimpleDateFormat is created in open() so each
        // parallel subtask owns its own instance (it is not thread-safe).
        SingleOutputStreamOperator<OrderInfo> orderInfoDS = orderInfoJsonStrDS.map(
                new RichMapFunction<String, OrderInfo>() {
                    SimpleDateFormat sdf = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    }

                    @Override
                    public OrderInfo map(String jsonStr) throws Exception {
                        OrderInfo orderInfo = JSON.parseObject(jsonStr, OrderInfo.class);
                        orderInfo.setCreate_ts(sdf.parse(orderInfo.getCreate_time()).getTime());
                        return orderInfo;
                    }
                }
        );

        // Parse order-detail JSON the same way.
        SingleOutputStreamOperator<OrderDetail> orderDetailDS = orderDetailJsonStrDS.map(
                new RichMapFunction<String, OrderDetail>() {
                    SimpleDateFormat sdf = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    }

                    @Override
                    public OrderDetail map(String jsonStr) throws Exception {
                        OrderDetail orderDetail = JSON.parseObject(jsonStr, OrderDetail.class);
                        orderDetail.setCreate_ts(sdf.parse(orderDetail.getCreate_time()).getTime());
                        return orderDetail;
                    }
                }
        );

        // Assign event-time watermarks (3s bounded out-of-orderness) on order info.
        SingleOutputStreamOperator<OrderInfo> orderInfoWithTsDS = orderInfoDS.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                        .withTimestampAssigner(new SerializableTimestampAssigner<OrderInfo>() {
                            @Override
                            public long extractTimestamp(OrderInfo orderInfo, long recordTimestamp) {
                                return orderInfo.getCreate_ts();
                            }
                        })
        );

        // Assign event-time watermarks on order detail.
        SingleOutputStreamOperator<OrderDetail> orderDetailWithTsDS = orderDetailDS.assignTimestampsAndWatermarks(
                WatermarkStrategy.<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                        .withTimestampAssigner(new SerializableTimestampAssigner<OrderDetail>() {
                            @Override
                            public long extractTimestamp(OrderDetail orderDetail, long recordTimestamp) {
                                return orderDetail.getCreate_ts();
                            }
                        })
        );

        // Key both streams on the order id so the interval join is co-partitioned.
        KeyedStream<OrderInfo, Long> orderInfoKeyedDS = orderInfoWithTsDS.keyBy(OrderInfo::getId);
        KeyedStream<OrderDetail, Long> orderDetailKeyedDS = orderDetailWithTsDS.keyBy(OrderDetail::getOrder_id);

        // Interval-join the two streams into an OrderWide, emitted as a JSONObject
        // so the subsequent async dimension lookups can attach fields in place.
        // FIX: the window was Time.milliseconds(-5, 5); with second-precision
        // event times and 3s watermark lateness a +/-5ms window would almost
        // never match — the intended window is +/-5 seconds.
        SingleOutputStreamOperator<JSONObject> orderWideDS = orderInfoKeyedDS
                .intervalJoin(orderDetailKeyedDS)
                .between(Time.seconds(-5), Time.seconds(5))
                .process(
                        new ProcessJoinFunction<OrderInfo, OrderDetail, JSONObject>() {
                            @Override
                            public void processElement(OrderInfo orderInfo, OrderDetail orderDetail, Context ctx, Collector<JSONObject> out) throws Exception {
                                OrderWide orderWide = new OrderWide(orderInfo, orderDetail);
                                // Round-trip through a JSON string to get a mutable
                                // JSONObject for the downstream dimension enrichment.
                                String jsonString = JSON.toJSONString(orderWide);
                                JSONObject jsonObject = JSON.parseObject(jsonString);
                                out.collect(jsonObject);
                            }
                        }
                );

        orderWideDS.print("orderWide>>>>");

        // Async dimension enrichment chain (HBase/Phoenix lookups via
        // DimAsyncFunctionNew1), each with a 60s async timeout.

        // User dimension.
        SingleOutputStreamOperator<JSONObject> orderWideWithUserDS = AsyncDataStream.unorderedWait(
                orderWideDS,
                new DimAsyncFunctionNew1(GmallConfig.DIM_USER_INFO),
                60, TimeUnit.SECONDS);

        orderWideWithUserDS.print(">>>>>");

        // Province dimension.
        SingleOutputStreamOperator<JSONObject> orderWideWithProvice = AsyncDataStream.unorderedWait(
                orderWideWithUserDS,
                new DimAsyncFunctionNew1(GmallConfig.DIM_PROVICE),
                60, TimeUnit.SECONDS
        );

        // SKU dimension.
        SingleOutputStreamOperator<JSONObject> orderWideWitSku = AsyncDataStream.unorderedWait(
                orderWideWithProvice,
                new DimAsyncFunctionNew1(GmallConfig.DIM_SKU_INFO),
                60, TimeUnit.SECONDS
        );

        // SPU dimension.
        SingleOutputStreamOperator<JSONObject> orderWideWitSpu = AsyncDataStream.unorderedWait(
                orderWideWitSku,
                new DimAsyncFunctionNew1(GmallConfig.DIM_SPU_INFO),
                60, TimeUnit.SECONDS
        );

        // Category-3 dimension.
        SingleOutputStreamOperator<JSONObject> orderWideWithCatege3 = AsyncDataStream.unorderedWait(
                orderWideWitSpu,
                new DimAsyncFunctionNew1(GmallConfig.DIM_BASE_CATEGORY3),
                60, TimeUnit.SECONDS
        );

        // Trademark (brand) dimension.
        SingleOutputStreamOperator<JSONObject> orderWideWithTrademark = AsyncDataStream.unorderedWait(
                orderWideWithCatege3,
                new DimAsyncFunctionNew1(GmallConfig.DIM_BASE_TRADEMARK),
                60, TimeUnit.SECONDS
        );

        // Sink 1: serialize the fully-enriched wide record back to the DWM Kafka topic.
        orderWideWithTrademark
                .map(
                        orderWide -> JSON.toJSONString(orderWide)
                )
                .addSink(MyKafkaUtil.getKafkaSink(orderWideSinkTopic));

        // Sink 2: convert to the portrait bean and insert into ClickHouse.
        // Anonymous class (not a lambda) so Flink can extract the output type.
        SingleOutputStreamOperator<OrderProtalBean> orderProtalStream = orderWideWithTrademark.map(new MapFunction<JSONObject, OrderProtalBean>() {
            @Override
            public OrderProtalBean map(JSONObject jsonObject) throws Exception {
                return jsonObject.toJavaObject(OrderProtalBean.class);
            }
        });
        orderProtalStream.addSink(
                ClickHouseUtil.getJdbcSink(" insert into protrait_order(category1_id,category2_id,category3_id,category1_name,category2_name, category3_name,carrier,carriername, user_gendertype,user_gender,province_id,province_name,tm_name,email, emailtype,user_age,yearbasetype,yearbasename,create_time,user_id,order_id,order_price,sku_num ,coupon_reduce_amount,total_amount,activity_reduce_amount,spu_name) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)")
        );

        // Launch the job (blocks until cancelled/failed).
        env.execute();
    }
}
