package cn.ac.iie.ulss.dataredistribution.handler;

import cn.ac.iie.ulss.dataredistribution.commons.GlobalVariables;
import cn.ac.iie.ulss.dataredistribution.commons.RuntimeEnv;
import cn.ac.iie.ulss.dataredistribution.tools.HNode;
import cn.ac.iie.ulss.dataredistribution.tools.DataProducer;
import cn.ac.iie.ulss.dataredistribution.tools.Rule;
import cn.ac.iie.ulss.dataredistribution.tools.INode;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.Map;
import java.util.HashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.log4j.PropertyConfigurator;
import org.apache.avro.util.Utf8;

/**
 * Sender thread that drains queued messages for a single table and forwards
 * them to the downstream topic.
 *
 * @author evan
 * @since 2014-10-15
 */
public class DataSenderThread implements Runnable {

    // Table this sender instance is responsible for (set once in the constructor).
    String table = null;
    // table name -> redistribution rule (shared, populated elsewhere).
    ConcurrentHashMap<String, Rule> tableToRule = null;
    Rule rule = null;
    // table -> (HNode -> (INode -> queue of serialized docs)) message staging area.
    ConcurrentHashMap<String, ConcurrentHashMap<HNode, ConcurrentHashMap<INode, ConcurrentLinkedQueue>>> tableToMSGStation = null;
    ConcurrentHashMap<HNode, ConcurrentHashMap<INode, ConcurrentLinkedQueue>> msgStation = null;
    // Batch size: flush a queue once it holds this many messages.
    int sendSize = 1000;
    // Max age (ms) a non-empty queue may wait before being flushed anyway.
    int datasenderLimitTime = 10000;
    String docsSchemaContent = null;
    Protocol protocoldocs = null;
    Schema docs = null;
    // Number of docs packed by the most recent pack() call; read by run() right
    // after pack() returns. NOTE(review): this instance-field side channel means
    // one DataSenderThread instance must never be run by two threads at once.
    int count = 0;
    ConcurrentHashMap<String, Map<Utf8, Utf8>> tableToDocDesc = null;
    Map<Utf8, Utf8> docDesc = null;
    ConcurrentHashMap<String, String> tableToTopic = null;
    Map<String, DataProducer> topicToProducer = null;
    Map<String, AtomicLong> tableToSendThreadCount = null;
    AtomicLong sendThreadCount = null;
    ConcurrentHashMap<String, INode> intervalToSNode = null;
    static org.apache.log4j.Logger logger = null;

    static {
        PropertyConfigurator.configure("log4j.properties");
        logger = org.apache.log4j.Logger.getLogger(DataSenderThread.class.getName());
    }

    /**
     * @param table name of the table whose staged messages this thread will send
     */
    public DataSenderThread(String table) {
        this.table = table;
    }

    /**
     * Endless send loop: resolves all shared state from RuntimeEnv once, then
     * repeatedly sweeps every (HNode, INode) queue of this table's message
     * station, flushing a queue when it is full (>= sendSize) or older than
     * datasenderLimitTime, and retiring idle queues. Never returns.
     */
    @Override
    public void run() {
        tableToRule = (ConcurrentHashMap<String, Rule>) RuntimeEnv.getParam(GlobalVariables.TABLE_TO_RULE);
        rule = tableToRule.get(table);
        tableToMSGStation = (ConcurrentHashMap<String, ConcurrentHashMap<HNode, ConcurrentHashMap<INode, ConcurrentLinkedQueue>>>) RuntimeEnv.getParam(GlobalVariables.TABLE_TO_MSGSTATION);
        msgStation = tableToMSGStation.get(table);
        sendSize = (Integer) RuntimeEnv.getParam(RuntimeEnv.SEND_SIZE);
        // Config value is in seconds; convert to milliseconds for comparisons below.
        datasenderLimitTime = (Integer) RuntimeEnv.getParam(RuntimeEnv.DATASENDER_LIMITTIME) * 1000;
        docsSchemaContent = (String) RuntimeEnv.getParam(GlobalVariables.DOCS_SCHEMA_CONTENT);
        protocoldocs = Protocol.parse(docsSchemaContent);
        docs = protocoldocs.getType(GlobalVariables.DOCS);
        tableToDocDesc = (ConcurrentHashMap<String, Map<Utf8, Utf8>>) RuntimeEnv.getParam(GlobalVariables.TABLE_TO_DOC_DESC);
        docDesc = tableToDocDesc.get(table);
        tableToTopic = (ConcurrentHashMap<String, String>) RuntimeEnv.getParam(GlobalVariables.TABLE_TO_TOPIC);
        topicToProducer = (Map<String, DataProducer>) RuntimeEnv.getParam(GlobalVariables.TOPIC_TO_PRODUCER);
        tableToSendThreadCount = (Map<String, AtomicLong>) RuntimeEnv.getParam(GlobalVariables.TABLE_TO_SENDTHREAD_COUNT);
        sendThreadCount = tableToSendThreadCount.get(table);
        intervalToSNode = (ConcurrentHashMap<String, INode>) RuntimeEnv.getParam(GlobalVariables.INTERVAL_TO_SNODE);

        byte[] sendData = null;
        while (true) {
            for (Iterator<Map.Entry<HNode, ConcurrentHashMap<INode, ConcurrentLinkedQueue>>> itr = msgStation.entrySet().iterator(); itr.hasNext();) {
                Map.Entry<HNode, ConcurrentHashMap<INode, ConcurrentLinkedQueue>> er = itr.next();
                HNode rnode = er.getKey();
                ConcurrentHashMap<INode, ConcurrentLinkedQueue> chm = er.getValue();
                for (Iterator<Map.Entry<INode, ConcurrentLinkedQueue>> its = chm.entrySet().iterator(); its.hasNext();) {
                    Map.Entry<INode, ConcurrentLinkedQueue> es = its.next();
                    INode snode = es.getKey();
                    ConcurrentLinkedQueue clq = es.getValue();
                    // Try to claim exclusive use of this snode; skip it if another
                    // sender thread already owns it.
                    synchronized (snode) {
                        if (!snode.used.get()) {
                            snode.used.compareAndSet(false, true);
                        } else {
                            continue;
                        }
                    }
                    try {
                        String sendIP = "";
                        Long f_id = 0L;
                        String road = "";
                        // Last '|'-separated component of the partition type is the version.
                        String[] pa = rnode.getPartType().split("\\|");
                        long version = Long.parseLong(pa[pa.length - 1]);
                        if (clq.isEmpty() && (System.currentTimeMillis() - snode.time) >= 1000000) {
                            // Queue has been empty for a long time: retire the snode
                            // and drop its cached file mapping.
                            synchronized (chm) {
                                chm.remove(snode);
                                intervalToSNode.remove(table + snode.interval);
                            }
                            FileOperator.removeFile(table + snode.interval + rnode.getName() + version);
                        } else if (clq.isEmpty()) {
                            // Empty but recently active: nothing to do this sweep.
                        } else if (clq.size() >= sendSize || (System.currentTimeMillis() - snode.time) >= datasenderLimitTime) {
                            // Resolve (sendIP, f_id, road) for this interval, re-fetching
                            // from the meta store until a valid, healthy file is found.
                            // NOTE(review): this loop spins without backoff while the meta
                            // store keeps returning bad entries — confirm that is intended.
                            while (true) {
                                logger.info("-------" + table + "----" + snode.interval + "---------" + rnode.getName() + "--------" + version);
                                Object[] obj = FileOperator.getFileFromValueToFile(table + snode.interval + rnode.getName() + version);
                                if (obj == null) {
                                    logger.info("the sendip or road or f_id for " + table + " " + snode.interval + " " + rnode.getName() + " is null");
                                    GetFileFromMetaStore gffm = new GetFileFromMetaStore(snode.interval, rnode, table);
                                    gffm.getFileForInverval();
                                    obj = FileOperator.getFileFromValueToFile(table + snode.interval + rnode.getName() + version);
                                }
                                if (obj != null) {
                                    if (obj[0] == null) {
                                        sendIP = "";
                                        logger.error("the sendIP is null");
                                    } else {
                                        sendIP = (String) obj[0];
                                    }

                                    if (obj[1] == null) {
                                        f_id = 0L;
                                    } else {
                                        f_id = (Long) obj[1];
                                    }

                                    if (obj[2] == null) {
                                        road = "";
                                        logger.error("the road is null");
                                    } else {
                                        road = (String) obj[2];
                                    }

                                    if (sendIP.equals("") || f_id == 0L || road.equals("")) {
                                        // Incomplete entry: discard it, mark the file bad
                                        // when we at least got an id, and re-fetch.
                                        FileOperator.removeFile(table + snode.interval + rnode.getName() + version);
                                        if (obj[1] != null) {
                                            GetFileFromMetaStore.setBad(table, f_id);
                                        }
                                        logger.info("the sendip or road or f_id for " + table + " " + snode.interval + " " + rnode.getName() + " is null");
                                        logger.info(version);
                                        GetFileFromMetaStore gffm = new GetFileFromMetaStore(snode.interval, rnode, table);
                                        gffm.getFileForInverval();
                                    } else {
                                        if (FileOperator.checkFile(f_id)) {
                                            // Healthy target file found — proceed to send.
                                            break;
                                        } else {
                                            FileOperator.removeFile(table + snode.interval + rnode.getName() + version);
                                            GetFileFromMetaStore.setBad(table, f_id);
                                            GetFileFromMetaStore gffm = new GetFileFromMetaStore(snode.interval, rnode, table);
                                            gffm.getFileForInverval();
                                        }
                                    }
                                }
                            }

                            // pack() also sets this.count to the number of docs packed.
                            sendData = pack(clq, f_id, road, sendIP);

                            if (sendData != null) {
                                String topic = tableToTopic.get(table);
                                DataProducer producer = topicToProducer.get(topic);
                                sendThreadCount.incrementAndGet();
                                producer.send(table, sendIP, count, snode.interval, sendData);
                                FileOperator.add(f_id, count);
                                snode.time = System.currentTimeMillis();
                            }
                        }
                    } catch (Exception e) {
                        // FIX: the original try had no catch, so any runtime failure
                        // (e.g. NumberFormatException from the partType version, a
                        // producer/send error, or a file-op failure) escaped run()
                        // and silently killed this table's sender thread forever.
                        // Log and move on to the next queue instead.
                        logger.error("error while sending data for table " + table, e);
                    } finally {
                        // Always release the snode claim taken above.
                        synchronized (snode) {
                            snode.used.compareAndSet(true, false);
                        }
                    }
                }
            }
        }
    }

    /**
     * Packages up to sendSize serialized documents from the queue into one
     * Avro-encoded "docs" message.
     *
     * Side effect: sets the instance field {@code count} to the number of
     * documents actually packed; run() reads it immediately afterwards.
     *
     * @param clq    queue of serialized documents (byte[] elements)
     * @param fid    file id, recorded in the user_desc map as "file_id"
     * @param road   routing info (currently unused here; resolved by run())
     * @param sendIP destination address (currently unused here; resolved by run())
     * @return the Avro binary encoding of the docs record, or null when the
     *         queue yielded no documents
     */
    public byte[] pack(ConcurrentLinkedQueue clq, long fid, String road, String sendIP) {
        GenericRecord docsRecord = new GenericData.Record(docs);
        GenericArray docSet = new GenericData.Array<GenericRecord>((sendSize), docs.getField(GlobalVariables.DOC_SET).schema());
        count = 0;
        byte[] data = null;
        while (count < sendSize) {
            data = (byte[]) clq.poll();
            if (data != null) {
                docSet.add(ByteBuffer.wrap(data));
                count++;
            } else {
                break;
            }
        }

        if (count <= 0) {
            return null;
        }

        Map<Utf8, Utf8> user_desc = new HashMap<Utf8, Utf8>();
        user_desc.put(new Utf8("file_id"), new Utf8(String.valueOf(fid)));
        docsRecord.put(GlobalVariables.DOC_SCHEMA_NAME, table);
        docsRecord.put(GlobalVariables.DOC_DESC, docDesc);
        docsRecord.put(GlobalVariables.USER_DESC, user_desc);
        docsRecord.put(GlobalVariables.DOC_SET, docSet);

        DatumWriter<GenericRecord> docsWriter = new GenericDatumWriter<GenericRecord>(docs);
        ByteArrayOutputStream docsbaos = new ByteArrayOutputStream();
        // FIX: use the cached singleton factory instead of allocating a new
        // EncoderFactory per call, as recommended by the Avro API docs.
        BinaryEncoder docsbe = EncoderFactory.get().binaryEncoder(docsbaos, null);
        try {
            docsWriter.write(docsRecord, docsbe);
            docsbe.flush();
        } catch (Exception ex) {
            logger.error(ex);
        }

        return docsbaos.toByteArray();
    }
}