package com.ruyuan.engine.core;

import com.alibaba.fastjson.JSON;
import com.ruyuan.engine.pojo.ClientLog;
import com.ruyuan.engine.utils.Constants;
import com.ruyuan.engine.utils.FlinkKafkaUtils;
import com.ruyuan.engine.utils.HBaseUtils;
import com.ruyuan.engine.utils.RedisUtils;
import org.apache.commons.collections.IteratorUtils;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.HTable;
import org.locationtech.spatial4j.distance.DistanceUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

/**
 * Rule engine, version 1.
 *
 * <p>Planned requirement: detect whether a login event originates from an
 * unusual (remote) location. The current implementation only wires the Kafka
 * event source into a local Flink environment and echoes each record, serving
 * as the skeleton for the rule logic to come.
 */
public class ProcessBaseRule1 {
    protected static final Logger LOG = LoggerFactory.getLogger(ProcessBaseRule1.class);

    /**
     * Job entry point: builds a local streaming environment (with the Flink
     * web dashboard enabled), attaches the Kafka event source, prints every
     * raw event to stdout, and launches the job.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Local environment with web UI — convenient for debugging the pipeline.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // Consume raw client-log events from Kafka.
        DataStreamSource<String> kafkaEvents = env.addSource(FlinkKafkaUtils.getKafkaEventSource());

        // Echo each record to stdout for inspection.
        kafkaEvents.print();

        // Blocks until the streaming job is cancelled or fails.
        env.execute();
    }
}