package com.lhz.collector.sink;

import com.alibaba.fastjson.JSON;
import com.lhz.collector.parse.util.ParseUtil;
import com.lhz.collector.util.ESClientUtil;
import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurable;
import org.apache.flume.conf.Configurables;
import org.apache.flume.sink.AbstractSink;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.transport.TransportClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Created by LHZ on 2016/3/15.
 */
public class ElaticSearchSink extends AbstractSink implements Configurable {
    // NOTE(review): class name is missing the "s" in "Elastic"; kept as-is because
    // Flume agent configs reference sinks by fully-qualified class name.
    private static final Logger logger = LoggerFactory.getLogger(ElaticSearchSink.class);

    private String clusterName;
    private String hostNames;
    private TransportClient client;
    private String indexPrefix;
    private int batchSize;
    private int workThreadNum;

    /** Hand-off buffer between the Flume {@link #process()} thread and the parse workers. */
    private final LinkedBlockingQueue<Event> eventQueue = new LinkedBlockingQueue<>(10000);

    /**
     * Dedicated lock for {@link #queue}. BUG FIX: the original synchronized on the
     * {@code queue} field itself, but {@link #execute()} reassigns that field inside
     * the critical section, so two threads could simultaneously hold "the lock" on
     * different objects and race {@code queue.add} against the buffer swap.
     */
    private final Object queueLock = new Object();

    /** Pending index requests awaiting a bulk flush; guarded by {@link #queueLock}. */
    private ArrayList<IndexRequestBuilder> queue = new ArrayList<>();

    private ExecutorService handlerService;
    /** Periodic flusher; kept as a field so {@link #stop()} can shut it down. */
    private ScheduledExecutorService flushService;

    @Override
    public void start() {
        this.client = ESClientUtil.getClient(clusterName, hostNames);
        handlerService = Executors.newFixedThreadPool(workThreadNum);
        for (int i = 0; i < workThreadNum; i++) {
            handlerService.submit(new ParseHandler());
        }
        // Flush partially-filled batches on a fixed interval so a slow trickle of
        // events still reaches ES within a bounded delay.
        flushService = Executors.newScheduledThreadPool(1);
        flushService.scheduleWithFixedDelay(this::execute, 5, 5, TimeUnit.SECONDS);
        super.start();
    }

    /**
     * BUG FIX: the original never overrode stop(), so the worker pool, the
     * (anonymous, unreferenced) scheduler thread and the ES client leaked forever.
     * Shut down producers first, best-effort flush what is buffered, then close.
     */
    @Override
    public void stop() {
        if (flushService != null) {
            flushService.shutdownNow();
        }
        if (handlerService != null) {
            handlerService.shutdownNow();
        }
        execute();
        if (client != null) {
            client.close();
        }
        super.stop();
    }

    /**
     * Worker that drains {@link #eventQueue}, converts each event into an index
     * request and appends it to the shared batch buffer.
     */
    class ParseHandler implements Runnable {
        @Override
        public void run() {
            logger.info("ES ParseHandler starting.....");
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    Event event = eventQueue.take();
                    Map<String, Object> esMap = ParseUtil.parse(event);
                    if (esMap != null && !esMap.isEmpty()) {
                        // Index name = prefix + the event's "@timestamp" field value
                        // (presumably a date bucket produced by ParseUtil — TODO confirm).
                        String index = indexPrefix + esMap.get("@timestamp");
                        String type = esMap.get("documentType") + "";
                        // documentType is routing metadata only; keep it out of the document body.
                        esMap.remove("documentType");
                        String jsondata = JSON.toJSONString(esMap);
                        logger.debug("{}write to ES,data={}", Thread.currentThread().getName(), jsondata);
                        IndexRequestBuilder request = client.prepareIndex(index, type).setSource(jsondata);
                        synchronized (queueLock) {
                            queue.add(request);
                            if (queue.size() >= batchSize) {
                                execute();
                            }
                        }
                    }
                } catch (InterruptedException e) {
                    // BUG FIX: stop() interrupts us via shutdownNow(); the original
                    // swallowed the interrupt and looped forever. Restore the flag and exit.
                    Thread.currentThread().interrupt();
                    break;
                } catch (Exception e) {
                    logger.error("ParseHandler happended exception...", e);
                }
            }
        }
    }

    /**
     * Flushes the buffered index requests to Elasticsearch as one bulk request.
     * Called by workers when a batch fills up and by the periodic flusher.
     */
    public void execute() {
        ArrayList<IndexRequestBuilder> requests;
        synchronized (queueLock) {
            if (queue.isEmpty()) {
                return;
            }
            // Swap the buffer out under the lock so workers can keep appending
            // while the (possibly slow) bulk call runs outside the lock.
            requests = queue;
            queue = new ArrayList<>();
        }
        BulkRequestBuilder bulkRequest = client.prepareBulk();
        for (IndexRequestBuilder request : requests) {
            bulkRequest.add(request);
        }
        try {
            BulkResponse bulkResponse = bulkRequest.execute().actionGet();
            if (bulkResponse.hasFailures()) {
                logger.warn("write failed {}", bulkResponse.buildFailureMessage());
            } else {
                logger.info("write successed,size={}", requests.size());
            }
        } catch (Exception e) {
            // BUG FIX: the original logged an empty string and dropped the exception,
            // making bulk-write failures completely invisible.
            logger.error("bulk write to ES failed, size=" + requests.size(), e);
        }
    }

    /**
     * Reads required cluster/host settings and optional tuning knobs.
     * Defaults: indexPrefix="log_", batchSize=100, workThreadNum=2.
     */
    @Override
    public void configure(Context context) {
        Configurables.ensureRequiredNonNull(context, "clusterName");
        Configurables.ensureRequiredNonNull(context, "hostNames");
        this.clusterName = context.getString("clusterName");
        this.hostNames = context.getString("hostNames");
        this.indexPrefix = context.getString("indexPrefix", "log_");
        this.batchSize = context.getInteger("batchSize", 100);
        this.workThreadNum = context.getInteger("workThreadNum", 2);
    }

    /**
     * Drains up to batchSize events from the channel into the in-memory eventQueue.
     * NOTE(review): the transaction commits once events are merely buffered, so
     * anything not yet bulk-flushed is lost if the process dies — this preserves
     * the original at-most-once behaviour rather than silently changing it.
     */
    @Override
    public Status process() {
        logger.debug("processing...");
        Status status = Status.READY;
        Channel channel = getChannel();
        Transaction txn = channel.getTransaction();
        try {
            txn.begin();
            int count;
            for (count = 0; count < batchSize; ++count) {
                Event event = channel.take();
                if (event == null) {
                    break;
                }
                // May block when eventQueue is full — back-pressure onto the channel.
                eventQueue.put(event);
            }
            if (count < batchSize) {
                // Channel drained before a full batch (covers count == 0): back off.
                status = Status.BACKOFF;
            }
            txn.commit();
        } catch (Throwable ex) {
            logger.error("Exception in process", ex);
            txn.rollback();
        } finally {
            txn.close();
        }
        return status;
    }
}
