package org.shuzhou.h_sink;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.elasticsearch.sink.Elasticsearch7SinkBuilder;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

import java.util.HashMap;
import java.util.Map;


/**
 * Example Flink job that writes a fixed set of application log lines into an
 * Elasticsearch 7 cluster running on localhost.
 *
 * <p>The pipeline is: bounded in-memory source of log strings → Elasticsearch
 * sink. Each log line becomes one document in the {@code my-index} index, with
 * the raw line stored under the {@code data} field and also used as the
 * document id.
 */
public class FlinkToElasticsearchExample {

    public static void main(String[] args) throws Exception {
        // Obtain the streaming execution environment.
        final StreamExecutionEnvironment env =
            StreamExecutionEnvironment.getExecutionEnvironment();

        // Bounded demo source: a handful of pre-baked application log lines.
        final DataStream<String> logs = env.fromElements(
            "2023-12-14 08:30:12 - INFO 用户登录成功，用户名：JohnDoe",
            "2023-12-14 09:15:29 - INFO 用户查看了产品列表页面",
            "2023-12-14 10:02:45 - INFO 用户添加了商品到购物车，商品ID：12345",
            "2023-12-14 11:20:01 - INFO 用户提交了订单，订单号：67890",
            "2023-12-14 13:45:18 - INFO 用户收到订单确认邮件",
            "2023-12-14 14:30:55 - INFO 用户浏览了帮助中心页面",
            "2023-12-14 15:10:27 - INFO 用户退出登录");

        // Target cluster: single-node Elasticsearch on localhost over plain HTTP.
        final HttpHost esHost = new HttpHost("127.0.0.1", 9200, "http");

        // Build the Elasticsearch sink. A bulk-flush threshold of 1 makes the
        // sink emit every element immediately instead of buffering it into a
        // larger bulk request — useful for demos, not for throughput.
        final var esSink = new Elasticsearch7SinkBuilder<String>()
            .setBulkFlushMaxActions(1)
            .setHosts(esHost)
            .setEmitter((element, context, indexer) ->
                indexer.add(buildIndexRequest(element)))
            .build();

        // Wire the log stream into Elasticsearch and run the job.
        logs.sinkTo(esSink);
        env.execute("Write to Elasticsearch");
    }

    /**
     * Wraps a single log line in an Elasticsearch index request.
     *
     * @param element the raw log line; stored under the {@code data} field and
     *     reused as the document id (so re-sending the same line overwrites the
     *     same document rather than duplicating it)
     * @return an index request targeting the {@code my-index} index
     */
    private static IndexRequest buildIndexRequest(String element) {
        final Map<String, Object> document = new HashMap<>();
        document.put("data", element);

        return Requests.indexRequest()
            .index("my-index")
            .id(element)
            .source(document);
    }
}