package hit.edu.zjc.Operator;

import hit.edu.zjc.Tool.MessyCodeCheck;
import hit.edu.zjc.UserInterface.UserDef;

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.Random;

import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;

/*
 * Adds messy-code (mojibake) detection to the value-cleaning pipeline.
 * */
public class ValueInvalidDetectOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {

    private static final long serialVersionUID = 1L;
    private ISerializerDeserializer[] DataISD_array=null;
    private UserDef[] UD_array=null;
    public ValueInvalidDetectOperatorDescriptor(JobSpecification spec,ISerializerDeserializer[] isd_array,UserDef[] ud_array)throws HyracksDataException {
        super(spec, 1, 1);//输入输出端的数量。
        DataISD_array=isd_array;
        UD_array=ud_array;
        recordDescriptors[0] = new RecordDescriptor(DataISD_array);//输出数据的格式。
        if(DataISD_array.length!=UD_array.length)
        	 throw new HyracksDataException("UserDef[] and DataISD[] are not the same length.");
    }

    /*
     * (non-Javadoc)
     * 
     * @see
     * edu.uci.ics.hyracks.api.dataflow.IActivity#createPushRuntime(edu.uci.ics.hyracks.api.context.IHyracksTaskContext,
     * edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider, int, int)
     */
    @Override
    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {

        final FrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), new RecordDescriptor(DataISD_array));

        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {

            private ArrayTupleBuilder tupleBuilder;

            private ByteBuffer outputBuffer;

            private FrameTupleAppender outputAppender;

            @Override
            public void open() throws HyracksDataException {
                tupleBuilder = new ArrayTupleBuilder(DataISD_array.length);
                outputBuffer = ctx.allocateFrame();
                outputAppender = new FrameTupleAppender(ctx.getFrameSize());
                outputAppender.reset(outputBuffer, true);
                writer.open();
            }

            @Override
            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
                accessor.reset(buffer);
                int tupleCount = accessor.getTupleCount();
                int fieldCount = accessor.getFieldCount();
                int fieldSlotsLength=accessor.getFieldSlotsLength();
                byte[] byte_array=accessor.getBuffer().array();
                for (int i = 0; i < tupleCount; i++) {
                    int tupleStartOffset = accessor.getTupleStartOffset(i);//获得第i个Tuple的开始位置。
                    tupleBuilder.reset();
                    	 //对每个tuple中的每个field进行：UserDef的检查。
                    try {
	                    for(int fIdx=0;fIdx<fieldCount;fIdx++)
	                    	 {
	                    	int fieldstartoffset=accessor.getFieldStartOffset(i, fIdx);
	                    	int fieldendoffset=accessor.getFieldEndOffset(i, fIdx);
	                    	int fieldlength=accessor.getFieldLength(i, fIdx);
	                    	//String data_str= new String(byte_array,tupleStartOffset+fieldSlotsLength+fieldstartoffset,fieldlength);//将field从二进制中提取出来。
								String data_str=UTF8StringSerializerDeserializer.INSTANCE.deserialize( new DataInputStream(new ByteArrayInputStream(byte_array,tupleStartOffset+fieldSlotsLength+fieldstartoffset,fieldlength)));
	                    	//System.out.print("length:"+fieldlength+" "+data_str+",");
								boolean isMessy=MessyCodeCheck.isMessyCode(data_str);//检查乱码
	                    	if(isMessy)//乱码
	                    		{
	                    		data_str=null;//按空处理。
	                    		}
	                    	if(UD_array[fIdx].isCheck)
								{
	                    		
		                    	boolean isvalid=UD_array[fIdx].checkdata(data_str);//进行异常值检查。
		                    		//此处应当记录清洗日志。
		                    	if(!isvalid)//检测出异常，进行异常值处理,记录清洗日志
		                    		{
	                    			data_str=UD_array[fIdx].correctdata();
		                    		}
								}
	                    	if(data_str==null)//处理一下空值
                    			data_str="";
	                    	tupleBuilder.addField(DataISD_array[fIdx], data_str);//将最终结果写进去。
	                    	 }
                    } catch (Exception e) {
								e.printStackTrace();//产生异常的原因是字符串编码不是UTF-8的。
								System.out.println("产生异常,原因是字符串编码不是UTF-8.");
						 }                  
                    outputAppender.reset(outputBuffer, false);
                    if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                            tupleBuilder.getSize())) {
                        FrameUtils.flushFrame(outputBuffer, writer);
                        outputAppender.reset(outputBuffer, true);
                        if (!outputAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
                                tupleBuilder.getSize())) {
                            throw new HyracksDataException(
                                    "Failed to copy an record into a frame: the record size is too large.");
                        }
                    }
                }
            }

            @Override
            public void fail() throws HyracksDataException {
            }

            @Override
            public void close() throws HyracksDataException {
                if (outputAppender.getTupleCount() > 0) {
                    FrameUtils.flushFrame(outputBuffer, writer);
                }
                outputAppender = null;
                outputBuffer = null;
                writer.close();
            }
        };
    }
}