package hit.edu.zjc.Operator;

import hit.edu.zjc.Tool.ObjectConvert;
import hit.edu.zjc.Tool.RWObject;
import hit.edu.zjc.UserInterface.UDColumInfo;
import hit.edu.zjc.UserInterface.UserDef;
import hit.edu.zjc.test.OperatorObjectRW;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Random;

import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
/*
 * Input: a partition of the data read by the FileScanner.
 * Loads the entity list resolved on this NC and, using that list,
 * the input data and the user definitions (UserDef), selects an
 * appropriate value to fill each attribute of every entity,
 * then emits the corrected data tuples.
 */
public class InconsistencyFixOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {

    private static final long serialVersionUID = 1L;
    private ISerializerDeserializer[] DataISD_array=null;
    private UserDef[] UD_array=null;
    private String ByteFilePath=null;//PartitionEntityListFilePath ArrayList<HashSet<Integer>>
    private Hashtable<Integer,String[]> DataTable=null;
    private ArrayList<HashSet<Integer>> EntityList=null;
    public InconsistencyFixOperatorDescriptor(JobSpecification spec,ISerializerDeserializer[] isd_array,UserDef[] ud_array,String filepath) {
        super(spec, 1, 1);//输入输出端的数量。
        DataISD_array=isd_array;
        UD_array=ud_array;
        ByteFilePath=filepath;
        recordDescriptors[0] = new RecordDescriptor(DataISD_array);//输出数据的格式。
    }

    @Override
    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {

        final FrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), new RecordDescriptor(DataISD_array));

        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {

        	  private ArrayTupleBuilder tupleBuilder;
        	
            private ByteBuffer outputBuffer;

            private FrameTupleAppender outputAppender;

            @Override
            public void open() throws HyracksDataException {
                outputBuffer = ctx.allocateFrame();
                tupleBuilder = new ArrayTupleBuilder(DataISD_array.length);          
                outputAppender = new FrameTupleAppender(ctx.getFrameSize());
                outputAppender.reset(outputBuffer, true);
                
                EntityList=(ArrayList<HashSet<Integer>>)RWObject.ReadObject(ByteFilePath);
                DataTable=new Hashtable<Integer,String[]>();
                
                writer.open();       
               }

            @Override
            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
                accessor.reset(buffer);//buffer内装的是输入的数据
                int tupleCount = accessor.getTupleCount();
                int fieldCount = accessor.getFieldCount();
                byte[] byte_array=accessor.getBuffer().array();
                //accessor.prettyPrint();
                int fieldSlotsLength=accessor.getFieldSlotsLength();
                for (int i = 0; i < tupleCount; i++) {
                	try{
	                	 String[] value=new String[DataISD_array.length];
	                	
	                    int tupleStartOffset = accessor.getTupleStartOffset(i);//获得第i个Tuple的开始位置。
	                    
	                    int Data_ID=0;//数据条目的主键值。
	                    
		                      //获取数据条目DataID的值：
							  int fieldstartoffset=accessor.getFieldStartOffset(i, 0);
							  int fieldendoffset=accessor.getFieldEndOffset(i, 0);
							  int fieldlength=accessor.getFieldLength(i, 0);   
							  String data_str=UTF8StringSerializerDeserializer.INSTANCE.deserialize( 
									new DataInputStream(
											new ByteArrayInputStream(byte_array,tupleStartOffset+fieldSlotsLength+fieldstartoffset,fieldlength)));
							  Data_ID=Integer.parseInt(data_str);//数据条目的主键值获取完毕。
	                     value[0]=data_str;//记录第一列的值
							  	 //获取其他属性值
		                    for(int fIdx=1;fIdx<DataISD_array.length;fIdx++)
		                    	 {
		                    		
		                    	fieldstartoffset=accessor.getFieldStartOffset(i, fIdx);
		                    	fieldendoffset=accessor.getFieldEndOffset(i, fIdx);
		                    	fieldlength=accessor.getFieldLength(i, fIdx);
		                    	    //将field从二进制中提取出来。
		                    		
									data_str=UTF8StringSerializerDeserializer.INSTANCE.deserialize( new DataInputStream(new ByteArrayInputStream(byte_array,tupleStartOffset+fieldSlotsLength+fieldstartoffset,fieldlength)));
		                       value[fIdx]=data_str;	            	
		                    	}
		                    //String mes="";
		                    //for(int i1=0;i1<value.length;i1++)
		                    //	mes=mes+value[i1]+"|";
		                    //System.out.println("VALUE:"+mes);
		                    DataTable.put(Data_ID, value);
                			}
            	   catch (Exception e) {
						e.printStackTrace();//产生异常的原因是字符串编码不是UTF-8的。
				 	}
                }
            }

            @Override
            public void fail() throws HyracksDataException {
            }

            @Override
            public void close() throws HyracksDataException {
               
            	for(int entitylist_index=0;entitylist_index<EntityList.size();entitylist_index++)
            	{
            		HashSet<Integer> set=EntityList.get(entitylist_index);//获取一个实体组
            		String[] correctvalue=new String[DataISD_array.length];
            		ArrayList<String[]> relevantData=new ArrayList<String[]>();
            		Iterator<Integer>it=set.iterator();
            		while(it.hasNext())
            		{
            			Integer id=it.next();
            			String[] str_array=DataTable.get(id);
            			if(str_array==null)
            				continue;
            			relevantData.add(str_array);
            		}
            		for(int i=0;i<correctvalue.length;i++)
            		{
            			UserDef ud=UD_array[i];
            			if(!ud.isCheck||ud.isNull)//不检查，或允许空
            			{
            				correctvalue[i]="";
            			}
            			else//复杂填充
            			{
            				String[] colum_data=new String[relevantData.size()];
            				for(int k=0;k<relevantData.size();k++)
            				{
            					String[] tuple_data=relevantData.get(k); 
            					colum_data[k]=tuple_data[i];
            				}
            				correctvalue[i]=ud.complexFill(colum_data);
            				if(correctvalue[i]==null)
            					correctvalue[i]="";
            			}
            		}
            		//完成对一个实体最终属性的确定。
            		//将实体data写出去。
            		//String mes="";
               // for(int i1=0;i1<correctvalue.length;i1++)
               //     	mes=mes+correctvalue[i1]+"|";
                //System.out.println("CORRECTVALUE:"+mes);
            		
            		
            		
            		tupleBuilder.reset();
            		for(int i=0;i<DataISD_array.length;i++)
            		{
            			tupleBuilder.addField(DataISD_array[i], correctvalue[i]);
            		}
            		outputAppender.reset(outputBuffer, false);
                if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                            tupleBuilder.getSize())) 
                	{
                        FrameUtils.flushFrame(outputBuffer, writer);
                        outputAppender.reset(outputBuffer, true);
                        if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                                tupleBuilder.getSize())) 
                        	  {
                            throw new HyracksDataException(
                                    "Failed to copy an record into a frame: the record size is too large.");
                              }
                    }
            		
            		
            	}
            	if (outputAppender.getTupleCount() > 0)
            	{
                     FrameUtils.flushFrame(outputBuffer, writer);
                }
              
                outputAppender = null;
                outputBuffer = null;
                writer.close();
            }
        };
    }
}