package com.vivo.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.vivo.bean.UserFeature;
import com.vivo.bean.UserWide;
import com.vivo.dws.BaseAppV1;
import com.vivo.util.DimUtil;
import com.vivo.util.RedisUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import redis.clients.jedis.Jedis;

import java.time.Duration;


/**
 * Flink job: consumes UserFeature JSON records from Kafka, assigns event-time
 * watermarks, and enriches each record with user dimension data looked up in Redis.
 *
 * NOTE(review): class name "demo2" violates UpperCamelCase, but it is the job's
 * public entry point (referenced by launch scripts), so it is left unchanged.
 */
public class demo2 extends BaseAppV1 {
    public static void main(String[] args) {
        // init(port, parallelism, checkpoint path, Kafka consumer group, topic)
        new demo2().init(3003, 1, "UserFeature_Wide", "UserFeature_Wide", "UserFeature");
    }

    @Override
    protected void run(StreamExecutionEnvironment env,
                       DataStreamSource<String> stream) {
        // Parse the raw Kafka JSON payload into UserFeature beans and assign
        // event-time watermarks with a 3-second bounded out-of-orderness.
        SingleOutputStreamOperator<UserFeature> kafkaStream = stream
                .map(info -> JSON.parseObject(info, UserFeature.class))
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy.<UserFeature>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                                .withTimestampAssigner(new SerializableTimestampAssigner<UserFeature>() {
                                    @Override
                                    public long extractTimestamp(UserFeature element, long recordTimestamp) {
                                        // FIX: original `element.getAge(ddddd)` did not compile
                                        // (`ddddd` undefined) and an age is not a timestamp.
                                        // Use the Kafka record timestamp as the event time.
                                        // TODO(review): if UserFeature carries its own
                                        // epoch-millis event-time field, return that instead.
                                        return recordTimestamp;
                                    }
                                }));
        dimJoin(kafkaStream);
    }

    /**
     * Enriches each UserFeature with dimension data fetched from Redis
     * (per-record point lookup, one Jedis connection per task slot).
     *
     * NOTE(review): the mapped stream is never attached to a sink or returned,
     * so this enrichment currently produces no output — a sink (or a returned
     * stream the caller wires up) still needs to be added.
     */
    private void dimJoin(SingleOutputStreamOperator<UserFeature> stream) {
        stream.map(new RichMapFunction<UserFeature, UserWide>() {
            private Jedis jedis;

            @Override
            public void open(Configuration parameters) throws Exception {
                // One Redis client per parallel task instance.
                jedis = RedisUtil.getRedisClient();
            }

            @Override
            public void close() throws Exception {
                if (jedis != null) {
                    // If the client came from a pool, close() returns it to the
                    // pool; if it was created with `new Jedis`, close() shuts it down.
                    jedis.close();
                }
            }

            @Override
            public UserWide map(UserFeature value) throws Exception {
                // Look up the user dimension row keyed by userid.
                JSONObject userInfo = DimUtil.readDimFromRedis(jedis, "dim_user_feature", value.getUserid());
                // FIX: original called userInfo.getString("") with an empty key and
                // NPE'd when the dimension was missing from Redis. Guard the miss.
                if (userInfo != null) {
                    // NOTE(review): assumes the dimension JSON stores age under
                    // the key "age" — TODO confirm against the dim table schema.
                    value.setAge(userInfo.getString("age"));
                }
                // TODO(review): original always returned null, which would NPE any
                // downstream operator. A UserWide should be built from `value` plus
                // the dimension fields once the UserWide API is confirmed.
                return null;
            }
        });
    }

    /**
     * Reads one dimension row from Redis by "table:id" key and parses it as JSON.
     *
     * NOTE(review): dead code — unused private duplicate of
     * DimUtil.readDimFromRedis (which the map function above calls).
     * Kept for reference; consider deleting.
     *
     * @param jedis Redis client to query
     * @param table dimension table name used as the key prefix
     * @param id    row id appended after the ':' separator
     * @return parsed JSON object, or null when the key is absent
     */
    private static JSONObject readDimFromRedis(Jedis jedis, String table, Long id) {
        String key = table + ":" + id;
        String s = jedis.get(key);
        if (s != null) {
            return JSON.parseObject(s);
        }
        return null;
    }
}
