import cn.sh.yhk.model.InviteBids;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch.util.RetryRejectedExecutionFailureHandler;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.flink.streaming.connectors.elasticsearch6.RestClientFactory;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.RestClientBuilder;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

//@RunWith(SpringRunner.class)
//@SpringBootTest(classes = Application.class)
public class OutputFlink {

//    @Autowired
//    InviteBidsMapper inviteBidsMapper;
//
//    @Autowired
//    KafkaTemplate<String, Object> kafkaTemplate;

    /**
     * Reads Kafka data (placeholder).
     *
     * <p>This was a Spring-driven {@code @Test} before the Spring annotations
     * at the top of the class were commented out; the body is intentionally
     * empty and the method is kept only as scaffolding.
     *
     * @throws Exception declared for test-framework compatibility; the empty
     *         body never throws
     */
    //@Test
    public void doTest() throws Exception {

    }

    /**
     * Application entry point: starts the Kafka-to-Elasticsearch filtering job.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job cannot be started or fails at runtime
     */
    public static void main(String[] args) throws Exception {
        inputData();
    }


    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("zookeeper.connect", "localhost:2181");
        props.put("group.id", "student-group-1");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "latest");

//        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        DataStreamSource<String> student = env.addSource(new FlinkKafkaConsumer<>(
                //这个 kafka topic 需要和上面的工具类的 topic 一致
                "test",
                new SimpleStringSchema(),
                props)).setParallelism(1);
//                .map(string -> JSON.parseObject(string, Student.class)); //Fastjson 解析字符串成 student 对象
        Map<String, String> config = new HashMap<>();
        config.put("cluster.name", "elasticsearch");
// This instructs the sink to emit after every element, otherwise they would be buffered
        config.put("bulk.flush.max.actions", "1");
//        config.put("auth_user","elastic");
//        config.put("auth_password","changeme");

        List<InetSocketAddress> transportAddresses = new ArrayList<>();
        transportAddresses.add(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 9300));
//        transportAddresses.add(new InetSocketAddress(InetAddress.getByName("10.2.3.1"), 9300));

        student.addSink(new ElasticsearchSink.Builder<>(config, transportAddresses, new ElasticsearchSinkFunction<String>() {
            public IndexRequest createIndexRequest(String element) {
                Map<String, String> json = new HashMap<>();
                json.put("data", element);
                return Requests.indexRequest()
                        .index("my-index-student-0211")
                        .type("my-type")
                        .source(json);
            }

            @Override
            public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
                indexer.add(createIndexRequest(element));
            }
        }));
//
//        env.execute("flink learning connectors kafka");
//
//    }

    /**
     * Consumes JSON strings from Kafka topic {@code test1}, keeps only records
     * whose {@code carModel} equals "皮卡" (pickup truck), parses the survivors
     * into {@link JSONObject}s and indexes them into Elasticsearch.
     *
     * @throws Exception if the Flink job cannot be started or fails at runtime
     */
    private static void inputData() throws Exception {

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "127.0.0.1:9092");
        properties.setProperty("zookeeper.connect", "127.0.0.1:2181"); // ignored by modern Kafka clients
        properties.setProperty("group.id", "test");
        properties.setProperty("auto.offset.reset", "latest");

        // The topic must match the one used by the producing utility class.
        DataStream<String> kafka = env.addSource(
                new FlinkKafkaConsumer<>("test1", new SimpleStringSchema(), properties));

        // Keep only pickup-truck records; anything that is not a valid JSON
        // object is dropped up front.
        DataStream<String> dataStream = kafka.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String s) throws Exception {
                if (!isJsonObject(s)) {
                    return false;
                }
                // ObjectMapper is not Serializable, so it is created per call
                // rather than captured in the closure. NOTE(review): hoist into
                // a RichFilterFunction's open() if throughput matters.
                ObjectMapper objectMapper = new ObjectMapper();
                InviteBids obj = objectMapper.readValue(s, InviteBids.class);
                // Constant-first equals makes the null check implicit.
                return "皮卡".equals(obj.getCarModel());
            }
        });

        // Parse the surviving strings into JSON objects for the ES sink.
        DataStream<JSONObject> json = dataStream.map(new MapFunction<String, JSONObject>() {
            @Override
            public JSONObject map(String s) throws Exception {
                return JSONObject.parseObject(s);
            }
        });

        List<HttpHost> esHttphost = new ArrayList<>();
        esHttphost.add(new HttpHost("127.0.0.1", 9200, "http"));

        ElasticsearchSink.Builder<JSONObject> esSinkBuilder = new ElasticsearchSink.Builder<JSONObject>(
                esHttphost,
                new ElasticsearchSinkFunction<JSONObject>() {
                    @Override
                    public void process(JSONObject element, RuntimeContext ctx, RequestIndexer indexer) {
                        // JSONObject implements Map, so it can be used as the
                        // document source directly.
                        indexer.add(Requests.indexRequest()
                                .index("index_customer_1")
                                .type("type_customer")
                                .source(element));
                    }
                });

        // Emit after every element instead of buffering bulk requests.
        esSinkBuilder.setBulkFlushMaxActions(1);
        esSinkBuilder.setRestClientFactory(new RestClientFactory() {
            @Override
            public void configureRestClientBuilder(RestClientBuilder restClientBuilder) {
                // The array form allows supplying several default headers.
                Header[] headers = new BasicHeader[]{new BasicHeader("Content-Type", "application/json")};
                restClientBuilder.setDefaultHeaders(headers);
                restClientBuilder.setMaxRetryTimeoutMillis(90000);
            }
        });
        // Re-queue bulk requests that ES rejected because its queue was full.
        esSinkBuilder.setFailureHandler(new RetryRejectedExecutionFailureHandler());

        json.addSink(esSinkBuilder.build());
        env.execute("flink learning connectors kafka");
    }

    /**
     * Returns whether {@code content} can be parsed as a JSON object.
     *
     * <p>Uses {@link StringUtils#isBlank} rather than {@code isEmpty} on
     * purpose: fastjson will happily "parse" a whitespace-only string, but
     * such input is meaningless and is rejected up front. The same caveat
     * applies when checking for JSON arrays.
     *
     * @param content candidate string; may be {@code null} or blank
     * @return {@code true} if {@code content} is non-blank and parses as a
     *         JSON object, {@code false} otherwise
     */
    public static boolean isJsonObject(String content) {
        if (StringUtils.isBlank(content)) {
            return false;
        }
        try {
            // Result deliberately discarded: only parseability matters here.
            JSONObject.parseObject(content);
            return true;
        } catch (Exception e) {
            return false;
        }
    }

}