package hit.edu.zjc.Operator.ValueFill;

import hit.edu.zjc.DataClean.DataCleaner;
import hit.edu.zjc.DataIntegration.DataInteDis;
import hit.edu.zjc.Operator.Entity.DataIDSimilarityPair;
import hit.edu.zjc.Tool.ObjectConvert;
import hit.edu.zjc.Tool.RWObject;
import hit.edu.zjc.Tool.ShowOutPut;
import hit.edu.zjc.UserInterface.UDColumInfo;
import hit.edu.zjc.UserInterface.UserDef;
import hit.edu.zjc.marshalling.ByteSerializerDeserializer;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Random;

import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;

/**
 * Hyracks operator that, for each incoming tuple, computes n-gram (1-gram and
 * 2-gram) similarity between the tuple's relevant column values and a
 * previously built gram index ({@code GramMap}, loaded from {@code FilePath}),
 * and accumulates, for every similar data id, the set of incoming tuple ids
 * that matched it. The accumulated table is serialized and emitted downstream
 * as a single byte stream when the input is exhausted.
 */
public class NullValueSimilarityComputeOperatorDescriptor extends
		AbstractSingleActivityOperatorDescriptor {

	private static final long serialVersionUID = 1L;
	// Serializers describing the layout of the incoming records.
	private ISerializerDeserializer[] DataISD_array = null;
	// Per-attribute user definitions; RelevantColumID lists the columns to compare.
	private UserDef[] UD_array = null;
	private String FilePath = null;// CompletedNullGramMap file path
	// Gram index loaded from FilePath: per column, gram -> set of data ids containing it.
	private Hashtable<String, HashSet<Integer>>[] GramMap = null;
	// Output accumulator: similar data id -> set of incoming tuple ids that matched it.
	private Hashtable<Integer, HashSet<Integer>> SetTable = new Hashtable<Integer, HashSet<Integer>>();

	/**
	 * @param spec      enclosing job specification
	 * @param isd_array serializers describing the incoming record layout
	 * @param ud_array  user definitions; only entries with a non-null
	 *                  RelevantColumID participate in the similarity computation
	 * @param filepath  path of the serialized CompletedNullGramMap file
	 */
	public NullValueSimilarityComputeOperatorDescriptor(JobSpecification spec,
			ISerializerDeserializer[] isd_array, UserDef[] ud_array,
			String filepath) {
		super(spec, 1, 1); // one input arity, one output arity
		DataISD_array = isd_array;
		UD_array = ud_array;
		// The operator emits one opaque byte stream (the serialized SetTable).
		recordDescriptors[0] = new RecordDescriptor(
				new ISerializerDeserializer[] { ByteSerializerDeserializer.INSTANCE });
		FilePath = filepath;
	}

	@Override
	public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
			IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
			throws HyracksDataException {

		final FrameTupleAccessor accessor =
				new FrameTupleAccessor(ctx.getFrameSize(), new RecordDescriptor(DataISD_array));

		return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {

			private ByteBuffer outputBuffer;

			@Override
			public void open() throws HyracksDataException {
				outputBuffer = ctx.allocateFrame();
				// The gram-map file is produced elsewhere in this project, so the
				// element type of the deserialized array cannot be checked statically.
				@SuppressWarnings("unchecked")
				Hashtable<String, HashSet<Integer>>[] map =
						(Hashtable<String, HashSet<Integer>>[]) RWObject.ReadObject(FilePath);
				GramMap = map;
				writer.open();
			}

			@Override
			public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
				accessor.reset(buffer);
				int tupleCount = accessor.getTupleCount();
				byte[] byte_array = accessor.getBuffer().array();
				int fieldSlotsLength = accessor.getFieldSlotsLength();
				for (int i = 0; i < tupleCount; i++) {
					int tupleStartOffset = accessor.getTupleStartOffset(i);
					try {
						// Field 0 holds the tuple's primary key, serialized as a
						// UTF-8 decimal string.
						int fieldstartoffset = accessor.getFieldStartOffset(i, 0);
						int fieldlength = accessor.getFieldLength(i, 0);
						String data_str = UTF8StringSerializerDeserializer.INSTANCE.deserialize(
								new DataInputStream(new ByteArrayInputStream(byte_array,
										tupleStartOffset + fieldSlotsLength + fieldstartoffset,
										fieldlength)));
						int Data_ID = Integer.parseInt(data_str); // this tuple's key

						for (int k = 0; k < UD_array.length; k++) {
							int[] cid = UD_array[k].RelevantColumID; // columns to compare
							if (cid == null)
								continue;
							// Ids similar on ALL relevant columns: seeded from the
							// first column, then intersected with each subsequent one.
							HashSet<Integer> SimilarSet = new HashSet<Integer>();
							for (int m = 0; m < cid.length; m++) {
								DataIDSimilarityPair pair = new DataIDSimilarityPair();
								int fIdx = cid[m];
								fieldstartoffset = accessor.getFieldStartOffset(i, fIdx);
								fieldlength = accessor.getFieldLength(i, fIdx);
								data_str = UTF8StringSerializerDeserializer.INSTANCE.deserialize(
										new DataInputStream(new ByteArrayInputStream(byte_array,
												tupleStartOffset + fieldSlotsLength + fieldstartoffset,
												fieldlength)));
								// Normalize: case-insensitive, spaces removed.
								data_str = data_str.toUpperCase();
								data_str = data_str.replaceAll(" ", "");

								// 1-gram similarity: each matching gram contributes
								// 1/length, so a complete match sums to 1.0.
								for (int str_index = 0; str_index < data_str.length(); str_index++) {
									String substring = data_str.substring(str_index, str_index + 1);
									HashSet<Integer> set = GramMap[fIdx].get(substring);
									if (set != null) {
										double weight = 1.0 / data_str.length();
										for (Integer data_id : set) {
											pair.addOneGramWeight(data_id, weight);
										}
									}
								}

								// 2-gram similarity: (length-1) grams, each weighted
								// 1/(length-1).
								for (int str_index = 0; str_index < data_str.length() - 1; str_index++) {
									String substring = data_str.substring(str_index, str_index + 2);
									HashSet<Integer> set = GramMap[fIdx].get(substring);
									if (set != null) {
										double weight = 1.0 / (data_str.length() - 1);
										for (Integer data_id : set) {
											pair.addTwoGramWeight(data_id, weight);
										}
									}
								}

								pair.refresh(1); // keep the largest weight per id
								if (m == 0) {
									SimilarSet.addAll(pair.getKeySet(UserDef.Simalarity));
								} else {
									SimilarSet.retainAll(pair.getKeySet(UserDef.Simalarity));
								}
							}
							// Record, for every similar data id, that this tuple matched it.
							for (Integer id : SimilarSet) {
								HashSet<Integer> set = SetTable.get(id);
								if (set == null) {
									set = new HashSet<Integer>();
									SetTable.put(id, set);
								}
								set.add(Data_ID);
							}
						}
					} catch (Exception e) {
						// Best effort: a malformed tuple (bad encoding, non-numeric
						// key, out-of-range column index) is skipped rather than
						// failing the whole job.
						e.printStackTrace();
					}
				}
			}

			@Override
			public void fail() throws HyracksDataException {
				// Propagate the failure downstream, per the Hyracks push-runtime
				// contract; an empty fail() would leave downstream operators waiting.
				writer.fail();
			}

			/**
			 * Flushes {@code data} downstream, splitting it across as many frames
			 * as needed when it exceeds a single frame's capacity.
			 */
			private void WriteByteBuffer(byte[] data) throws HyracksDataException {
				outputBuffer.clear();
				if (data.length < outputBuffer.capacity()) {
					outputBuffer.put(data);
					FrameUtils.flushFrame(outputBuffer, writer);
				} else { // does not fit into one frame
					int byteremain = data.length;
					int i = 0;
					for (i = 0; byteremain >= outputBuffer.capacity(); i++) {
						outputBuffer.put(data, i * outputBuffer.capacity(), outputBuffer.capacity());
						FrameUtils.flushFrame(outputBuffer, writer);
						outputBuffer.clear();
						byteremain = byteremain - outputBuffer.capacity();
					}
					if (byteremain > 0) { // trailing partial frame
						outputBuffer.put(data, i * outputBuffer.capacity(), byteremain);
						FrameUtils.flushFrame(outputBuffer, writer);
					}
				}
			}

			@Override
			public void close() throws HyracksDataException {
				try {
					// Emit the accumulated similarity table as one serialized object.
					byte[] objbytes = ObjectConvert.ObjectToByteArray(SetTable);
					WriteByteBuffer(objbytes);
				} finally {
					outputBuffer = null;
					// Always close the downstream writer, even if the flush failed.
					writer.close();
				}
			}
		};
	}
}