package MicroTime;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializeConfig;
import com.streamxhub.streamx.flink.core.StreamEnvConfig;
import com.streamxhub.streamx.flink.core.java.sink.KafkaSink;
import com.streamxhub.streamx.flink.core.java.source.KafkaSource;
import com.streamxhub.streamx.flink.core.scala.StreamingContext;
import com.streamxhub.streamx.flink.core.scala.source.KafkaRecord;
import kafka2Hbase.Utils.MD5Util;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.fs.hdfs.HadoopFileSystem;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import java.io.IOException;
import java.time.Duration;
import java.util.Date;
import java.util.Objects;

/**
 * Flink streaming job that measures how long each user stays on a chat-room
 * microphone.
 *
 * <p>Pipeline: Kafka source -&gt; keep only CHAT_ROOM_MIC_UP / CHAT_ROOM_MIC_DOWN
 * events -&gt; parse each record into a {@link MicoOnPojo} -&gt; assign event-time
 * watermarks (3&nbsp;s bounded out-of-orderness) -&gt; key by user#room -&gt; pair each
 * MIC_DOWN with the previously buffered MIC_UP held in keyed state (7-day TTL)
 * -&gt; emit the duration record as JSON to Kafka and to HBase (via Phoenix).
 *
 * @author Spring_Hu
 * @date 2022/6/6 20:36
 */
public class MicroOnTime {

    /** Sub-directory (appended to Constants.CHECKPOINT_DIR) holding this job's checkpoints. */
    private static final String CHECKPOINT_DIR_NAME = "micro_on_time_stat_prod";

    /** Event type logged when a user takes the microphone. */
    private static final String MIC_UP = "CHAT_ROOM_MIC_UP";

    /** Event type logged when a user releases the microphone. */
    private static final String MIC_DOWN = "CHAT_ROOM_MIC_DOWN";

    public static void main(String[] args) throws IOException {
        StreamEnvConfig envConfig = new StreamEnvConfig(args, null);
        StreamingContext context = new StreamingContext(envConfig);

        // Checkpoint every 10 s; retain externalized checkpoints after a manual cancel
        // so the job can later be restored from the latest checkpoint.
        context.enableCheckpointing(10000);
        context.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        context.getCheckpointConfig().setCheckpointStorage(Constants.CHECKPOINT_DIR + CHECKPOINT_DIR_NAME);

        SingleOutputStreamOperator<String> resultStream = new KafkaSource<String>(context)
                .getDataStream()
                // Keep only mic-up / mic-down events; unparseable records (null JSON) are dropped.
                .filter(item -> {
                    JSONObject jsonObject = JSON.parseObject(item.value());
                    String action = null;
                    if (jsonObject != null) {
                        action = jsonObject.getString("eventType");
                    }
                    return MIC_UP.equals(action) || MIC_DOWN.equals(action);
                })
                // Parse the raw Kafka record into a typed POJO.
                // NOTE(review): assumes eventParas[0] and ingestion_timestamp are always present
                // on mic events — a missing field would throw an NPE here; confirm against the
                // producer's event schema.
                .map(new RichMapFunction<KafkaRecord<String>, MicoOnPojo>() {
                    @Override
                    public MicoOnPojo map(KafkaRecord<String> stringKafkaRecord) {
                        JSONObject json = JSON.parseObject(stringKafkaRecord.value());
                        String roomNo = json.getJSONArray("eventParas").getString(0);
                        Long userId = json.getLong("busiId");
                        String action = json.getString("eventType");
                        Long ingestionTimestamp = json.getLong("ingestion_timestamp");
                        Date ingestionTime = new Date(ingestionTimestamp);
                        return new MicoOnPojo(userId, roomNo, action, ingestionTime, ingestionTimestamp);
                    }
                })
                // Event time = ingestion timestamp; tolerate up to 3 s of out-of-order data.
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<MicoOnPojo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                        .withTimestampAssigner((element, ts) -> element.getIngestion_time_stamp()))
                // One state entry per user per room.
                .keyBy(value -> value.getUser_id() + "#" + value.getRoom_no())
                .map(new RichMapFunction<MicoOnPojo, MicroOnResult>() {
                    // Buffers the most recent MIC_UP event until the matching MIC_DOWN arrives.
                    ValueState<MicoOnPojo> micoOnPojoValueState = null;

                    @Override
                    public void open(Configuration parameters) {
                        ValueStateDescriptor<MicoOnPojo> stateDescriptor = new ValueStateDescriptor<>("micro", MicoOnPojo.class);
                        // Expire unmatched MIC_UP entries after 7 days so keyed state cannot
                        // grow unbounded when a MIC_DOWN never arrives.
                        // NOTE(review): cleanupInRocksdbCompactFilter only takes effect with the
                        // RocksDB state backend, which is not configured in this file — verify
                        // which backend the deployment actually uses.
                        StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.days(7))
                                .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
                                .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
                                .cleanupInRocksdbCompactFilter(1000L)
                                .build();
                        stateDescriptor.enableTimeToLive(ttlConfig);
                        micoOnPojoValueState = getRuntimeContext().getState(stateDescriptor);
                    }

                    @Override
                    public MicroOnResult map(MicoOnPojo micoOnPojo) throws IOException {
                        MicroOnResult result = null;
                        if (MIC_UP.equals(micoOnPojo.getAction())) {
                            // A repeated MIC_UP overwrites the previous one — only the latest
                            // mic-up is paired with the next mic-down.
                            micoOnPojoValueState.update(micoOnPojo);
                        }
                        if (MIC_DOWN.equals(micoOnPojo.getAction())) {
                            // A MIC_DOWN without a buffered MIC_UP produces null and is removed
                            // by the nonNull filter downstream.
                            MicoOnPojo micUpEvent = micoOnPojoValueState.value();
                            if (micUpEvent != null) {
                                long micDurationMs = micoOnPojo.getIngestion_time_stamp() - micUpEvent.getIngestion_time_stamp();
                                // Deterministic row key: the same up/down pair always hashes to
                                // the same key, so replays upsert instead of duplicating rows.
                                String key = MD5Util.code(micoOnPojo.getRoom_no() + "#" + micoOnPojo.getUser_id()
                                        + "#" + micUpEvent.getIngestion_time_stamp());
                                result = new MicroOnResult(key, micoOnPojo.getRoom_no(), micoOnPojo.getUser_id(),
                                        micDurationMs, TransFormUtils.getDateString(micUpEvent.getIngestion_time()),
                                        TransFormUtils.getDateString(micoOnPojo.getIngestion_time()));
                                micoOnPojoValueState.clear();
                            }
                        }
                        return result;
                    }
                })
                .filter(Objects::nonNull)
                .map(result -> JSON.toJSONString(result, new SerializeConfig(true)));

        // 1. Write the JSON results to Kafka.
        new KafkaSink<String>(context).sink(resultStream);

        // 2. Write the results to HBase through Phoenix.
        resultStream.addSink(new HbaseSink());

        context.start();
    }
}
