package hit.edu.zjc.Operator;

import hit.edu.zjc.Tool.MessyCodeCheck;
import hit.edu.zjc.Tool.RWObject;
import hit.edu.zjc.UserInterface.UserDef;

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Random;
import java.util.Set;

import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;

/*
 * Added garbled-text (mojibake) checking support.
 */
/**
 * Final integration operator: forwards every input tuple whose leading ID field
 * does not belong to any previously computed entity cluster and, once the input
 * is exhausted, appends the corrected records loaded from disk before closing
 * the downstream writer.
 */
public class FinalIntegrationOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {

    private static final long serialVersionUID = 1L;

    /** Per-field serializers; input and output share the same record layout. */
    private final ISerializerDeserializer[] dataIsdArray;
    /** Path of a serialized {@code ArrayList<HashSet<Integer>>} of entity-id clusters. */
    private final String entityListPath;
    /** Path of a serialized {@code ArrayList<String[]>} of corrected records appended on close. */
    private final String correctStringListPath;

    /**
     * @param spec            enclosing job specification
     * @param isd_array       field serializers describing the record format (input == output)
     * @param entityfilepath  file holding the entity-id clusters (read via {@code RWObject.ReadObject})
     * @param contentfilepath file holding the corrected records emitted at close
     */
    public FinalIntegrationOperatorDescriptor(JobSpecification spec, ISerializerDeserializer[] isd_array,
            String entityfilepath, String contentfilepath) throws HyracksDataException {
        super(spec, 1, 1); // one input port, one output port
        dataIsdArray = isd_array;
        recordDescriptors[0] = new RecordDescriptor(dataIsdArray); // output record format
        entityListPath = entityfilepath;
        correctStringListPath = contentfilepath;
    }

    @Override
    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {

        final FrameTupleAccessor accessor =
                new FrameTupleAccessor(ctx.getFrameSize(), new RecordDescriptor(dataIsdArray));

        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {

            private ArrayTupleBuilder tupleBuilder;
            private ByteBuffer outputBuffer;
            private FrameTupleAppender outputAppender;
            // State lives on the pushable (not the serialized descriptor) so
            // concurrent partitions in one JVM never share mutable fields.
            private ArrayList<String[]> contentList;
            // Union of all entity clusters, built once in open() so membership
            // tests in nextFrame() are O(1) instead of scanning every cluster.
            private HashSet<Integer> clusteredIds;

            @SuppressWarnings("unchecked")
            @Override
            public void open() throws HyracksDataException {
                tupleBuilder = new ArrayTupleBuilder(dataIsdArray.length);
                outputBuffer = ctx.allocateFrame();
                outputAppender = new FrameTupleAppender(ctx.getFrameSize());
                outputAppender.reset(outputBuffer, true);
                ArrayList<HashSet<Integer>> entityList =
                        (ArrayList<HashSet<Integer>>) RWObject.ReadObject(entityListPath);
                contentList = (ArrayList<String[]>) RWObject.ReadObject(correctStringListPath);
                clusteredIds = new HashSet<Integer>();
                for (HashSet<Integer> cluster : entityList) {
                    clusteredIds.addAll(cluster);
                }
                writer.open();
            }

            /** @return true when the id belongs to no entity cluster, i.e. the tuple must be forwarded. */
            public boolean isOutPut(Integer id) {
                return !clusteredIds.contains(id);
            }

            /** Appends the tuple currently held by {@code tupleBuilder}, flushing the frame when full. */
            private void appendCurrentTuple() throws HyracksDataException {
                outputAppender.reset(outputBuffer, false);
                if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                        tupleBuilder.getSize())) {
                    FrameUtils.flushFrame(outputBuffer, writer);
                    outputAppender.reset(outputBuffer, true);
                    if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                            tupleBuilder.getSize())) {
                        throw new HyracksDataException(
                                "Failed to copy an record into a frame: the record size is too large.");
                    }
                }
            }

            @Override
            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
                accessor.reset(buffer);
                int tupleCount = accessor.getTupleCount();
                int fieldCount = accessor.getFieldCount();
                int fieldSlotsLength = accessor.getFieldSlotsLength();
                byte[] byteArray = accessor.getBuffer().array();
                for (int i = 0; i < tupleCount; i++) {
                    int tupleStartOffset = accessor.getTupleStartOffset(i); // start of the i-th tuple
                    tupleBuilder.reset();

                    String[] content = new String[fieldCount];
                    for (int fIdx = 0; fIdx < fieldCount; fIdx++) {
                        int fieldStartOffset = accessor.getFieldStartOffset(i, fIdx);
                        int fieldLength = accessor.getFieldLength(i, fIdx);
                        // NOTE(review): every field is decoded as a UTF-8 string regardless of its
                        // declared serializer; this assumes an all-string record layout — confirm.
                        content[fIdx] = UTF8StringSerializerDeserializer.INSTANCE.deserialize(new DataInputStream(
                                new ByteArrayInputStream(byteArray,
                                        tupleStartOffset + fieldSlotsLength + fieldStartOffset, fieldLength)));
                        tupleBuilder.addField(dataIsdArray[fIdx], content[fIdx]); // copy field into output tuple
                    }
                    int dataId;
                    try {
                        dataId = Integer.parseInt(content[0]); // field 0 carries the record id
                    } catch (NumberFormatException e) {
                        throw new HyracksDataException("Non-numeric record id in field 0: " + content[0]);
                    }
                    if (!isOutPut(dataId)) {
                        continue; // id belongs to an entity cluster: suppress the raw tuple here
                    }
                    appendCurrentTuple();
                }
            }

            @Override
            public void fail() throws HyracksDataException {
                // Propagate the failure downstream instead of silently swallowing it;
                // otherwise consumers never learn the pipeline aborted.
                writer.fail();
            }

            @Override
            public void close() throws HyracksDataException {
                try {
                    // Emit the corrected records loaded in open(); guard against
                    // close() being reached when open() failed before loading them.
                    if (contentList != null) {
                        for (int k = 0; k < contentList.size(); k++) {
                            String[] datacontent = contentList.get(k);
                            tupleBuilder.reset();
                            for (int m = 0; m < datacontent.length; m++) {
                                tupleBuilder.addField(dataIsdArray[m], datacontent[m]); // write final result
                            }
                            appendCurrentTuple();
                        }
                    }
                    if (outputAppender != null && outputAppender.getTupleCount() > 0) {
                        FrameUtils.flushFrame(outputBuffer, writer); // flush any trailing partial frame
                    }
                } finally {
                    outputAppender = null;
                    outputBuffer = null;
                    writer.close(); // always release the downstream writer
                }
            }
        };
    }
}