package com.dayouzi.crawler_data.flink.first.job;

import com.dayouzi.crawler_data.dao.OnlyforcrawlerDao;
import com.dayouzi.crawler_data.entity.CrawlerDetailNew;
import com.dayouzi.crawler_data.entity.DayEntity;
import com.dayouzi.crawler_data.utils.IdGenerator;
import com.dayouzi.crawler_data.utils.PropertiesUtil;
import com.dayouzi.crawler_data.utils.StreamSourceUtil;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.RichProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.http.HttpHost;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedQueue;

public class FirstDayJob {

    // DAO used to look up the spider type for a given forid.
    private static OnlyforcrawlerDao dao = new OnlyforcrawlerDao();

    // Buffer used to batch DayEntity rows before bulk-inserting into Elasticsearch.
    private static ConcurrentLinkedQueue<DayEntity> dataList = new ConcurrentLinkedQueue<>();

    // Id generator; constructing it with "crawlerweb_day" presumably registers the
    // sequence that the static IdGenerator.getAutoId() call below draws from — TODO confirm.
    private static IdGenerator idGenerator = new IdGenerator("crawlerweb_day");

    // Unique MySQL CDC server id.
    // BUG FIX: this was previously assigned only in the instance constructor, but
    // main() is static and never instantiates FirstDayJob, so the CDC source was
    // handed a null serverId. Initialize it eagerly instead.
    private static String serverId = PropertiesUtil.getProperties("day.serverId");

    public FirstDayJob() {
        // Kept for backward compatibility; serverId is already initialized statically.
        serverId = PropertiesUtil.getProperties("day.serverId");
    }

    public static void main(String[] args) throws Exception {
        // Create the execution environment. Parallelism must stay 1: the static
        // ES buffer (dataList) and the timer flag below are not safe across subtasks.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // MySQL CDC source over crawlerweb_detail_new (initial snapshot + binlog).
        // NOTE(review): credentials are hard-coded here — consider moving them to
        // the properties file alongside day.serverId.
        MySqlSource<String> mySqlSource = StreamSourceUtil.getMySqlSource(
                "127.0.0.1",
                3306,
                "root",
                "123456",
                "onlyforcrawler",
                "onlyforcrawler.crawlerweb_detail_new",
                "Asia/Shanghai",
                StartupOptions.initial(),
                serverId,
                100
        );
        DataStream<String> dataStream = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "crawlerweb_detail_newSource");

        // Deserialize the raw CDC JSON into CrawlerDetailNew entities.
        DataStream<CrawlerDetailNew> crawlerDetailNewDataStream = StreamSourceUtil.getCrawlerDetailStream(dataStream);

        // Assign event-time timestamps: every record is stamped with the start of
        // its creation day, so all records of one day land in the same daily window.
        crawlerDetailNewDataStream = crawlerDetailNewDataStream
                .assignTimestampsAndWatermarks(WatermarkStrategy.<CrawlerDetailNew>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(new SerializableTimestampAssigner<CrawlerDetailNew>() {
                            @Override
                            public long extractTimestamp(CrawlerDetailNew crawlerDetailNew, long previousTimestamp) {
                                return crawlerDetailNew.getGmtCreate()
                                        .atZone(ZoneId.systemDefault())
                                        .toLocalDate()
                                        .atStartOfDay()
                                        .atZone(ZoneId.systemDefault())
                                        .toInstant()
                                        .toEpochMilli();
                            }
                        })
                );

        SingleOutputStreamOperator<DayEntity> resultStream = crawlerDetailNewDataStream
                // Keep only records whose spider type is 0.
                // BUG FIX: the DAO was queried twice per record; query once and reuse.
                .filter((FilterFunction<CrawlerDetailNew>) item -> {
                    Integer spiderType = dao.getSpiderType(item.getForid());
                    return spiderType != null && spiderType == 0;
                })
                // Key by (forid, creation date) so each site/day pair aggregates independently.
                .keyBy(new KeySelector<CrawlerDetailNew, Tuple2<Integer, String>>() {
                    @Override
                    public Tuple2<Integer, String> getKey(CrawlerDetailNew value) throws Exception {
                        Integer forid = value.getForid();
                        String date = value.getGmtCreate().toLocalDate().toString();
                        return new Tuple2<>(forid, date);
                    }
                })
                // One-day tumbling windows, offset -8h to align with Asia/Shanghai (UTC+8).
                .window(TumblingEventTimeWindows.of(Time.days(1), Time.hours(-8)))
                .process(new RichProcessWindowFunction<CrawlerDetailNew, DayEntity, Tuple2<Integer, String>, TimeWindow>() {
                    // Per-key running sum of record counts, keyed by type.
                    private transient MapState<String, Integer> mapState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        mapState = getRuntimeContext().getMapState(
                                new MapStateDescriptor<>("mapState", String.class, Integer.class));
                    }

                    @Override
                    public void process(Tuple2<Integer, String> keyBys, Context context, Iterable<CrawlerDetailNew> iterable, Collector<DayEntity> collector) throws Exception {
                        // Sum counts per type for this (forid, date) window.
                        for (CrawlerDetailNew item : iterable) {
                            String type = item.getType();
                            Integer count = item.getCount();
                            Integer sum = mapState.get(type);
                            mapState.put(type, sum == null ? count : count + sum);
                        }
                        Map<String, Integer> map = new HashMap<>();
                        for (String key : mapState.keys()) {
                            map.put(key, mapState.get(key));
                        }

                        // Emit one aggregated row for this key and day.
                        Long id = IdGenerator.getAutoId();
                        String time = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
                        collector.collect(new DayEntity(id, keyBys.f0, keyBys.f1, map, time, time));

                        // BUG FIX: keyed state was never cleared. Because the key embeds the
                        // date, each key fires exactly once, so stale per-key state would
                        // otherwise accumulate forever. Clearing after emit is safe here.
                        mapState.clear();
                    }
                });

//        resultStream.print("result");

        // Buffer DayEntity rows and bulk-write them to Elasticsearch, flushing
        // when 1000 rows are buffered or every 10 seconds via a processing-time timer.
        resultStream.keyBy(DayEntity::getId)
                .process(new KeyedProcessFunction<Long, DayEntity, Object>() {
                    // BUG FIX: ObjectMapper was previously constructed once per record in
                    // the bulk loop; it is expensive to build and reusable, so create it once.
                    private transient ObjectMapper objectMapper;
                    private transient RestHighLevelClient client;
                    // Shared across keys within the subtask; safe only at parallelism 1.
                    private transient volatile boolean isTimerSet = false;
                    // Flush interval for the periodic timer.
                    private static final long FLUSH_INTERVAL_MS = 10000L;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        objectMapper = new ObjectMapper();
                        client = new RestHighLevelClient(RestClient.builder(
                                new HttpHost("192.168.1.153", 9200, "http")
                        ));
                    }

                    @Override
                    public void processElement(DayEntity entity, Context context, Collector<Object> collector) throws Exception {
                        // Buffer the row.
                        dataList.add(entity);

                        // Flush immediately once the batch is large enough.
                        if (dataList.size() >= 1000) {
                            bulkRequest();
                        }

                        // Arm a flush timer so a small trailing batch is not stuck forever.
                        if (!isTimerSet) {
                            context.timerService().registerProcessingTimeTimer(System.currentTimeMillis() + FLUSH_INTERVAL_MS);
                            isTimerSet = true;
                        }
                    }

                    @Override
                    public void onTimer(long timestamp, OnTimerContext ctx, Collector<Object> out) throws Exception {
                        // Periodic flush of whatever is buffered.
                        if (!dataList.isEmpty()) {
                            bulkRequest();
                        }

                        // Allow the next element to re-arm the timer.
                        isTimerSet = false;
                    }

                    // Drains the buffer and bulk-inserts it into the crawlerweb_day index.
                    // BUG FIX: the old code iterated the queue, bulked, then replaced the
                    // queue with a fresh instance — any record added concurrently between
                    // the iteration and the swap was silently dropped. Draining with poll()
                    // removes exactly the records that were indexed and loses nothing.
                    private void bulkRequest() throws Exception {
                        BulkRequest bulkRequest = new BulkRequest();
                        int flushed = 0;
                        DayEntity everyDayEntity;
                        while ((everyDayEntity = dataList.poll()) != null) {
                            IndexRequest request = new IndexRequest("crawlerweb_day", "_doc");
                            request.source(objectMapper.writeValueAsString(everyDayEntity), XContentType.JSON);
                            bulkRequest.add(request);
                            flushed++;
                        }
                        if (flushed > 0) {
                            client.bulk(bulkRequest, RequestOptions.DEFAULT);
                        }

                        System.out.println("批量入库 " + flushed + " 条数据");
                    }

                    @Override
                    public void close() throws Exception {
                        // Guard against open() having failed before client was assigned.
                        if (client != null) {
                            client.close();
                        }
                    }
                });

        env.execute();
    }


}
