/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package cn.ac.iie.ulss.data.a2b.rocketmq;

import cn.ac.iie.common.util.misc.MiscTools;
import cn.ac.iie.ulss.data.a2b.commons.RuntimeEnv;
import static cn.ac.iie.ulss.data.a2b.server.Dataa2bStartUp.a2bLinkedBlockingQueue;
import static cn.ac.iie.ulss.data.a2b.server.Dataa2bStartUp.protocol;
import com.alibaba.rocketmq.client.consumer.DefaultMQPushConsumer;
import com.alibaba.rocketmq.client.consumer.listener.ConsumeConcurrentlyContext;
import com.alibaba.rocketmq.client.consumer.listener.ConsumeConcurrentlyStatus;
import com.alibaba.rocketmq.client.consumer.listener.MessageListenerConcurrently;
import com.alibaba.rocketmq.common.consumer.ConsumeFromWhere;
import com.alibaba.rocketmq.common.message.MessageExt;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.log4j.Logger;

/**
 *
 * @author Li Mingyang
 */
public class RocketMQConsumer {

    public static Logger log = Logger.getLogger(RocketMQConsumer.class.getName());
    public static DefaultMQPushConsumer consumer = null;

    /**
     * Starts a RocketMQ push consumer on the given topic. Each message body is
     * decoded as an Avro "docs" envelope record; every document inside its
     * {@code doc_set} array is decoded with the configured table schema,
     * filtered by the key/value pair from the CONDITION parameter, and matching
     * records are enqueued onto {@code a2bLinkedBlockingQueue} for downstream
     * processing.
     *
     * @param topic the RocketMQ topic to subscribe to (all tags, "*")
     * @throws Exception if the consumer cannot be configured or started, or if
     *         the CONDITION runtime parameter is not of the form "field,value"
     */
    public void startConsumer(String topic) throws Exception {
        log.info("start consumer the data from rocketmq");

        consumer = new DefaultMQPushConsumer(RuntimeEnv.getParamAsString(RuntimeEnv.SRC_GROUP));
        consumer.setNamesrvAddr(RuntimeEnv.getParamAsString(RuntimeEnv.SRC_NAMESERVER));
        consumer.setInstanceName("ulss-data-a2b" + MiscTools.getHostName());
        consumer.setConsumeConcurrentlyMaxSpan(5000);
        consumer.setPullThresholdForQueue(8);
        consumer.setPullBatchSize(8);
        consumer.setConsumeThreadMin(6);
        consumer.setConsumeThreadMax(9);
        consumer.setConsumeMessageBatchMaxSize(4);
        consumer.setClientCallbackExecutorThreads(24);
        consumer.setPullInterval(0);
        // FIX: the original set CONSUME_FROM_LAST_OFFSET here and then
        // overwrote it with CONSUME_FROM_FIRST_OFFSET a few lines later; only
        // the last call takes effect, so keep the single effective value.
        consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
        consumer.subscribe(topic, "*");

        // Avro readers: one for the outer "docs" envelope, one for the
        // per-document schema named by the TABLENAME parameter.
        final Schema docsSchema = protocol.getType("docs");
        final DatumReader<GenericRecord> docsReader =
                new GenericDatumReader<GenericRecord>(docsSchema);
        final Schema singleDocSchema =
                protocol.getType(RuntimeEnv.getParamAsString(RuntimeEnv.TABLENAME));
        final DatumReader<GenericRecord> singleDocReader =
                new GenericDatumReader<GenericRecord>(singleDocSchema);

        // Parse the filter condition ("field,value") once instead of splitting
        // the parameter string twice, and fail fast on a malformed value
        // (the original threw a bare ArrayIndexOutOfBoundsException).
        final String[] condition = RuntimeEnv.getParamAsString(RuntimeEnv.CONDITION).split("[,]");
        if (condition.length < 2) {
            throw new IllegalArgumentException(
                    "CONDITION parameter must be of the form \"field,value\": "
                    + RuntimeEnv.getParamAsString(RuntimeEnv.CONDITION));
        }
        final String key = condition[0];
        final String value = condition[1];

        consumer.registerMessageListener(new MessageListenerConcurrently() {
            @Override
            public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> msgs,
                    ConsumeConcurrentlyContext context) {
                for (MessageExt msg : msgs) {
                    try {
                        byte[] binaryData = msg.getBody();
                        // DecoderFactory.get() is the shared, thread-safe
                        // factory; the deprecated new DecoderFactory() was
                        // being allocated once per message and per document.
                        BinaryDecoder docsDecoder = DecoderFactory.get()
                                .binaryDecoder(new ByteArrayInputStream(binaryData), null);
                        GenericRecord docsRecord = new GenericData.Record(docsSchema);
                        docsReader.read(docsRecord, docsDecoder);

                        GenericArray docSet = (GenericArray) docsRecord.get("doc_set");
                        log.info("get " + docSet.size() + " "
                                + RuntimeEnv.getParamAsString(RuntimeEnv.TABLENAME)
                                + " records and size " + binaryData.length + " bytes");

                        // Decode each document and keep only those whose
                        // configured field matches the condition value.
                        for (Object element : docSet) {
                            ByteArrayInputStream docBis =
                                    new ByteArrayInputStream(((ByteBuffer) element).array());
                            BinaryDecoder docDecoder =
                                    DecoderFactory.get().binaryDecoder(docBis, null);
                            GenericRecord record = singleDocReader.read(null, docDecoder);
                            // Null guard: a missing field previously caused an
                            // uncaught NPE, forcing redelivery of the batch.
                            Object fieldValue = record.get(key);
                            if (fieldValue != null && fieldValue.toString().equalsIgnoreCase(value)) {
                                a2bLinkedBlockingQueue.add(record);
                            }
                        }
                    } catch (IOException ex) {
                        // Decode failures are logged and the batch is still
                        // acknowledged below, matching the original behavior.
                        log.error(ex.getMessage(), ex);
                    }
                }
                return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
            }
        });

        consumer.start();
        log.info("start consumer data from rocketmq");
    }
}
