package hit.edu.zjc.Operator.ValueFill;

import hit.edu.zjc.Tool.MessyCodeCheck;
import hit.edu.zjc.Tool.RWObject;
import hit.edu.zjc.Tool.ShowOutPut;
import hit.edu.zjc.UserInterface.UserDef;

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Random;
import java.util.Set;

import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;

/*
 * Adds garbled-text (messy-code) checking functionality.
 * */
public class NullValueFillOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {

    private static final long serialVersionUID = 1L;
    private ISerializerDeserializer[] DataISD_array=null;
    private UserDef[] UD_array=null;
    private String FilePath;//CompletedNullValueSetTableFileName
    private Hashtable<Integer,HashSet<Integer>>SetTable=null;
    private Hashtable<Integer,String[]>ContentTable=null;
    public NullValueFillOperatorDescriptor(JobSpecification spec,ISerializerDeserializer[] isd_array,UserDef[] ud_array,String filepath)throws HyracksDataException {
        super(spec, 1, 1);//输入输出端的数量。
        DataISD_array=isd_array;
        UD_array=ud_array;
        recordDescriptors[0] = new RecordDescriptor(DataISD_array);//输出数据的格式。
        FilePath=filepath;
    }

    @Override
    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {

        final FrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), new RecordDescriptor(DataISD_array));

        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {

            private ArrayTupleBuilder tupleBuilder;

            private ByteBuffer outputBuffer;

            private FrameTupleAppender outputAppender;

            @Override
            public void open() throws HyracksDataException {
                tupleBuilder = new ArrayTupleBuilder(DataISD_array.length);
                outputBuffer = ctx.allocateFrame();
                outputAppender = new FrameTupleAppender(ctx.getFrameSize());
                outputAppender.reset(outputBuffer, true);
                SetTable=(Hashtable<Integer,HashSet<Integer>>)RWObject.ReadObject(FilePath);
                ContentTable=new Hashtable<Integer,String[]>();
                writer.open();
            }
            
            public boolean isStore(Integer id)
            	{
            		Enumeration<Integer>e=SetTable.keys();
            		while(e.hasMoreElements())
            		{
            			Integer null_id=e.nextElement();
            			if(id.intValue()== null_id.intValue())
            				return true;
            			HashSet<Integer>set=SetTable.get(null_id);
            			if(set.contains(id))
            	         return true;
            		}
            		return false;
            	}

            public boolean isKey(Integer id)
            	{
            		Set<Integer> keyset=SetTable.keySet();
            		return keyset.contains(id);
            	}
            
            @Override
            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
                accessor.reset(buffer);
                int tupleCount = accessor.getTupleCount();
                int fieldCount = accessor.getFieldCount();
                int fieldSlotsLength=accessor.getFieldSlotsLength();
                byte[] byte_array=accessor.getBuffer().array();
                for (int i = 0; i < tupleCount; i++) {
                    int tupleStartOffset = accessor.getTupleStartOffset(i);//获得第i个Tuple的开始位置。
                    tupleBuilder.reset();
                    
                    int Data_ID=0;              	 
	                 String[] content=new String[fieldCount];		                    
                    for(int fIdx=0;fIdx<fieldCount;fIdx++)
                    	 {
                    	int fieldstartoffset=accessor.getFieldStartOffset(i, fIdx);
                    	int fieldendoffset=accessor.getFieldEndOffset(i, fIdx);
                    	int fieldlength=accessor.getFieldLength(i, fIdx);
							content[fIdx]=UTF8StringSerializerDeserializer.INSTANCE.deserialize( new DataInputStream(new ByteArrayInputStream(byte_array,tupleStartOffset+fieldSlotsLength+fieldstartoffset,fieldlength)));
                    	tupleBuilder.addField(DataISD_array[fIdx], content[fIdx]);//将最终结果写进去。	
                    	 }
                    Data_ID=Integer.parseInt(content[0]);
                    if(isStore(Data_ID))
                    	 {
                    	ContentTable.put(Data_ID, content);//保存在内存中
                    	//if(isKey(Data_ID))
                    	continue;
                    	 }
            
                    outputAppender.reset(outputBuffer, false);
                    if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                            tupleBuilder.getSize())) {
                        FrameUtils.flushFrame(outputBuffer, writer);
                        outputAppender.reset(outputBuffer, true);
                        if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                                tupleBuilder.getSize())) {
                            throw new HyracksDataException(
                                    "Failed to copy an record into a frame: the record size is too large.");
                        		}
                    	}
                }
            }

            @Override
            public void fail() throws HyracksDataException {
               }

            
            public HashSet<Integer> ValidSimilarity(int[] CID,String[] keycontent,HashSet<Integer> set)
               {
            		Iterator<Integer> it=set.iterator();
            		HashSet<Integer> reSet=new HashSet<Integer>();
            		while(it.hasNext())
            		{
            			Integer otherid=it.next();
            			String[] othercontent=ContentTable.get(otherid);
            			int k=0;
            			for(;k<CID.length;k++)
            			{
            				int fIdx=CID[k];
            				String keystr=keycontent[fIdx];
            				String otherstr=othercontent[fIdx];
            				int onegram=0;
            				int twogram=0;
            				for(int index=0;index<otherstr.length();index++)
            				{
            					String substring=otherstr.substring(index, index+1);
            					int r=keystr.indexOf(substring);
            					if(r!=-1)
            						onegram++;
            				}
            				for(int index=0;index<otherstr.length()-1;index++)
            				{
            					String substring=otherstr.substring(index,index+2);
            					int r=keystr.indexOf(substring);
            					if(r!=-1)
            						twogram++;
            				}
            				double rate1,rate2;
            				rate1=1.0*onegram/keystr.length();
            				rate2=1.0*twogram/keystr.length();
            				if(rate1>=UserDef.Simalarity||rate2>=UserDef.Simalarity)
            					continue;
            				else
            					break;
            			}
            			if(k==CID.length)
            				reSet.add(otherid);
            			
            		}
            		
            		
            		return reSet;
               }
            
            public String[] GetSimilarityValue(int k,HashSet<Integer>set)
            	{
            		String[] res=new String[set.size()];
            		Iterator<Integer> it=set.iterator();
            		for(int i=0;it.hasNext();i++)
            		{
            			Integer id=it.next();
            			String[] content=ContentTable.get(id);
            			res[i]=content[k];
            		}
            		return res;
            	}
            @Override
            public void close() throws HyracksDataException {
            
            		Enumeration<Integer>e=SetTable.keys();
            		while(e.hasMoreElements())
            		{
            			Integer key=e.nextElement();
            			String[]key_content=ContentTable.get(key);
            			if(key_content==null)
            			{
            				ShowOutPut.show("NotFind:"+key);
            				continue;
            			}
            			for(int k=0;k<key_content.length;k++)
            			{
            				boolean isValid=UD_array[k].checkdata(key_content[k]);
            				if(isValid)
        					continue;
        					int[] ColumId=UD_array[k].RelevantColumID;
        					if(ColumId==null)
        						continue;
        					HashSet<Integer>set=SetTable.get(key);//找到所有和他相似的id集合
        					//进行验证。
        					set=ValidSimilarity(ColumId,key_content,set);
        					String[] input=GetSimilarityValue(k,set);
        					key_content[k]=UD_array[k].complexFill(input);
        					if(key_content[k]==null)
        						key_content[k]="";
            				
            			}
            			ContentTable.put(key, key_content);
            			
            		}
            		//将这些曾经出现过空值的数据写出去。以及其他附属数据
            		Enumeration<Integer> keys=ContentTable.keys();
            		while(keys.hasMoreElements())
            		{
            			Integer dataid=keys.nextElement();
            			String[] datacontent=ContentTable.get(dataid);
            			tupleBuilder.reset();
            			for(int m=0;m<datacontent.length;m++)
            				tupleBuilder.addField(DataISD_array[m], datacontent[m]);//将最终结果写进去。	
            			
            			 outputAppender.reset(outputBuffer, false);
                         if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                                 tupleBuilder.getSize())) {
                             FrameUtils.flushFrame(outputBuffer, writer);
                             outputAppender.reset(outputBuffer, true);
                             if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                                     tupleBuilder.getSize())) {
                                 throw new HyracksDataException(
                                         "Failed to copy an record into a frame: the record size is too large.");
                             		}
                         	}	
            		}
                if (outputAppender.getTupleCount() > 0) {
                    FrameUtils.flushFrame(outputBuffer, writer);
                    }
                outputAppender = null;
                outputBuffer = null;
                writer.close();
            }
        };
    }
}