package com.atguigu.gmall.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.BaseAppV2;
import com.atguigu.gmall.realtime.bean.OrderDetail;
import com.atguigu.gmall.realtime.bean.OrderInfo;
import com.atguigu.gmall.realtime.bean.OrderWide;
import com.atguigu.gmall.realtime.util.DimUtil;
import com.atguigu.gmall.realtime.util.JdbcUtil;
import com.atguigu.gmall.realtime.util.RedisUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import redis.clients.jedis.Jedis;

import java.sql.Connection;
import java.time.Duration;
import java.util.Map;
import java.util.Objects;

import static com.atguigu.gmall.realtime.common.Constant.*;

/**
 * @Author lzc
 * @Date 2022/4/18 14:02
 */
public class DwmOrderWideApp_Cache extends BaseAppV2 {
    public static void main(String[] args) {
        // Start the app: consume the two DWD fact topics and build the
        // order wide table, with Redis acting as a side cache for the
        // dimension lookups (cache-aside, see notes at end of file).
        new DwmOrderWideApp_Cache().init("DwmOrderWideApp_Cache",
                                         3003,
                                         1,
                                         "DwmOrderWideApp_Cache",
                                         "DwmOrderWideApp_Cache",
                                         TOPIC_DWD_ORDER_INFO, TOPIC_DWD_ORDER_DETAIL
        );
    }
    
    @Override
    public void handle(StreamExecutionEnvironment env,
                       Map<String, DataStreamSource<String>> topicToStreamMap) {
        
        // 1. Fact-to-fact join (order_info + order_detail)
        SingleOutputStreamOperator<OrderWide> orderWideStreamWithoutDims = factsJoin(topicToStreamMap);
        // 2. Fact-to-dimension "join": fill in the dimension attributes
        factJoinDim(orderWideStreamWithoutDims);
        
        // 3. TODO: write the wide-table records to Kafka
    }
    
    /**
     * Fills in the dimension attributes of each {@link OrderWide} record by
     * looking up the dimension tables through {@link DimUtil#readDim}
     * (which is expected to consult the Redis cache and fall back to
     * Phoenix — NOTE(review): verify against DimUtil).
     * Currently prints the enriched records instead of producing them.
     *
     * @param orderWideStreamWithoutDims joined facts still missing dim data
     */
    private void factJoinDim(SingleOutputStreamOperator<OrderWide> orderWideStreamWithoutDims) {
        // Dimension tables consulted:
        //   user_info, base_province,
        //   sku_info, spu_info, base_trademark, base_category3
        orderWideStreamWithoutDims
            .map(new RichMapFunction<OrderWide, OrderWide>() {
                
                private Jedis redisClient;
                private Connection phoenixConn;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    // One Phoenix connection and one Redis client per subtask,
                    // created once and reused for every record.
                    phoenixConn = JdbcUtil.getJdbcConnection(PHOENIX_DRIVER, PHOENIX_URL, null, null);
                    redisClient = RedisUtil.getRedisClient();
                }
                
                @Override
                public void close() throws Exception {
                    if (phoenixConn != null) {
                        phoenixConn.close();
                    }
                    if (redisClient != null) {
                        // If the client came from a pool, close() returns it to
                        // the pool; if it was created via new Jedis(), it closes.
                        redisClient.close();
                    }
                }
                
                // Reads one dimension row and fails fast with a descriptive
                // message when the row is missing, instead of a bare NPE at
                // the first getString() call.
                private JSONObject readRequiredDim(String table, Long id) throws Exception {
                    JSONObject dim = DimUtil.readDim(redisClient, phoenixConn, table, id);
                    return Objects.requireNonNull(
                        dim, () -> "dimension row not found: table=" + table + ", id=" + id);
                }
                
                @Override
                public OrderWide map(OrderWide orderWide) throws Exception {
                    // 1. user_info: gender and age (derived from birthday)
                    JSONObject userInfo = readRequiredDim("dim_user_info", orderWide.getUser_id());
                    orderWide.setUser_gender(userInfo.getString("GENDER"));
                    orderWide.calculateUserAge(userInfo.getString("BIRTHDAY"));
                    
                    // 2. base_province
                    JSONObject baseProvince = readRequiredDim("dim_base_province", orderWide.getProvince_id());
                    orderWide.setProvince_name(baseProvince.getString("NAME"));
                    orderWide.setProvince_iso_code(baseProvince.getString("ISO_CODE"));
                    orderWide.setProvince_area_code(baseProvince.getString("AREA_CODE"));
                    orderWide.setProvince_3166_2_code(baseProvince.getString("ISO_3166_2"));
                    
                    // 3. sku_info — also yields the spu/category3/trademark
                    //    ids needed for the lookups below
                    JSONObject skuInfo = readRequiredDim("dim_sku_info", orderWide.getSku_id());
                    orderWide.setSku_name(skuInfo.getString("SKU_NAME"));
                    orderWide.setOrder_price(skuInfo.getBigDecimal("PRICE"));
                    orderWide.setSpu_id(skuInfo.getLong("SPU_ID"));
                    orderWide.setCategory3_id(skuInfo.getLong("CATEGORY3_ID"));
                    orderWide.setTm_id(skuInfo.getLong("TM_ID"));
                    
                    // 4. spu_info
                    JSONObject spuInfo = readRequiredDim("dim_spu_info", orderWide.getSpu_id());
                    orderWide.setSpu_name(spuInfo.getString("SPU_NAME"));
                    
                    // 5. base_category3
                    JSONObject c3 = readRequiredDim("dim_base_category3", orderWide.getCategory3_id());
                    orderWide.setCategory3_name(c3.getString("NAME"));
                    
                    // 6. base_trademark
                    JSONObject tm = readRequiredDim("dim_base_trademark", orderWide.getTm_id());
                    orderWide.setTm_name(tm.getString("TM_NAME"));
                    
                    return orderWide;
                }
            })
            .print();
    }
    
    /**
     * Joins the order_info and order_detail streams into {@link OrderWide}
     * records using an event-time interval join on the order id.
     *
     * @param topicToStreamMap raw Kafka streams keyed by topic name
     * @return one OrderWide per matched (order_info, order_detail) pair
     */
    private SingleOutputStreamOperator<OrderWide> factsJoin(Map<String, DataStreamSource<String>> topicToStreamMap) {
        // order_info, keyed by its primary key (id); event time = create_ts,
        // with up to 3s of out-of-orderness tolerated.
        KeyedStream<OrderInfo, Long> orderInfoStream = topicToStreamMap
            .get(TOPIC_DWD_ORDER_INFO)
            .map(info -> JSON.parseObject(info, OrderInfo.class))
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((info, ts) -> info.getCreate_ts())
            )
            .keyBy(OrderInfo::getId);
        
        // order_detail, keyed by its foreign key (order_id)
        KeyedStream<OrderDetail, Long> orderDetailStream = topicToStreamMap
            .get(TOPIC_DWD_ORDER_DETAIL)
            .map(info -> JSON.parseObject(info, OrderDetail.class))
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((detail, ts) -> detail.getCreate_ts())
            )
            .keyBy(OrderDetail::getOrder_id);
        
        // An order and its detail rows are created together, so a small
        // +/-5s interval around the order_info timestamp is sufficient.
        return orderInfoStream
            .intervalJoin(orderDetailStream)
            .between(Time.seconds(-5), Time.seconds(5))
            .process(new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
                @Override
                public void processElement(OrderInfo left,
                                           OrderDetail right,
                                           Context ctx,
                                           Collector<OrderWide> out) throws Exception {
                    out.collect(new OrderWide(left, right));
                }
            });
    }
}
/*

缓存优化: 把已经查到的维度信息存入到内存,下次使用的时候直接从内存读取

内部的内存
flink的状态
    好处:
        1. flink的内存, 读写极快
        2. flink的状态提供丰富数据结构
     
     坏处:
        1. 维度数据过多, 占用大量的状态, 影响flink的计算时内存使用
        2. 维度虽然变化慢, 但是也会有可能发生变化
            缓存发生变化, 状态没有办法及时更新

外部的内存
redis
    好处:
    
    
    坏处:
        1. 每次需要通过网络访问redis, 速度受影响
        2. 影响到redis的内存的占用
        
     好处:
        维度发生变化的时候, 可以及时更新
        

旁路缓存

*/