package hit.edu.zjc.DataClean;

import hit.edu.zjc.DataIntegration.DataInteDis;
import hit.edu.zjc.Operator.Build2GramMapOperatorDescriptor;
import hit.edu.zjc.Operator.DataSplitOperatorDescriptor;
import hit.edu.zjc.Operator.DoNothingOperatorDescriptor;
import hit.edu.zjc.Operator.FinalIntegrationOperatorDescriptor;
import hit.edu.zjc.Operator.InconsistencyFixByteOperatorDescriptor;
import hit.edu.zjc.Operator.InconsistencyFixOperatorDescriptor;
import hit.edu.zjc.Operator.NodeCleanUpOperatorDescriptor;
import hit.edu.zjc.Operator.SimilarityComputeOperatorDescriptor;
import hit.edu.zjc.Operator.SourceByteWriterOperatorDescriptor;
import hit.edu.zjc.Operator.SourceGramMapIntegrationOperatorDescriptor;
import hit.edu.zjc.Operator.SourceHashSetIntegrationOperatorDescriptor;
import hit.edu.zjc.Operator.SourceMatchOperatorDescriptor;
import hit.edu.zjc.Operator.SourceSetTableMergeOperatorDescriptor;
import hit.edu.zjc.Operator.SourceSimpleSetListIntegrationOperatorDescriptor;
import hit.edu.zjc.Operator.SourceStringListIntegrationOperatorDescriptor;
import hit.edu.zjc.Operator.ValueInvalidDetectOperatorDescriptor;
import hit.edu.zjc.Operator.ValueFill.NullValueFillOperatorDescriptor;
import hit.edu.zjc.Operator.ValueFill.NullValueSimilarityComputeOperatorDescriptor;
import hit.edu.zjc.Operator.ValueFill.ValueInvalidDetectNullGramBuildOperatorDescriptor;
import hit.edu.zjc.Tool.Algorithm;
import hit.edu.zjc.Tool.HyracksJobEx;
import hit.edu.zjc.Tool.ObjectConvert;
import hit.edu.zjc.Tool.RWObject;
import hit.edu.zjc.Tool.ShowOutPut;
import hit.edu.zjc.UserInterface.UDColumInfo;
import hit.edu.zjc.UserInterface.UserDef;
import hit.edu.zjc.marshalling.ByteSerializerDeserializer;
import hit.edu.zjc.parsers.ByteTupleParserFactory;

import java.util.*;
import java.io.File;

import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.FrameFileWriterOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.misc.SplitOperatorDescriptor;

public class DataCleaner {

	public static int GroupNodeNumber=5;//Number of NCs per group during data integration. Grouped integration improves parallel efficiency. GroupNodeNumber>=2!!
	public static final String ValueValidFileName="ValueValidData.txt";//File produced after the value-validity (type) check.
	public static final String HashMapBytesFileName="GramData.dat";//Binary file written locally after the GramMap is built.
	                                                                                //Also used as a suffix for files received from other NCs: <NC name>+GramData.dat.
	public static final String HashMapCompletedFileName="GramDataCompleted.dat";//File name of the complete, integrated GramMap.
	public static final String HashSetBytesFileName="HashSet.dat";//Partial grouping sets each NC derives from the GramMap.
	public static final String SetTableBytesFileName="SetTable.dat";//Partial Hashtable<Integer,HashSet<Integer>> each NC derives from the GramMap.
	public static final String PartitionSetListFileName="PartitionSetList.dat";//Binary SetList file dispatched to every NC.
	public static final String PartitionSetDataFileName="PartitionData.txt";//Each NC's share of the data used in phase two of entity resolution.
	public static final String PartitionEntityIDListFileName="PartitionEntityIDList.dat";
	public static final String CompletedEntityIDListFIleName="CompletedEntityIDList.dat";
	public static final String PartitionCorrectEntityDataFileName="PartitionCorrectData.txt";
	
	public static final String PartitionNullValueGramMapFileName="PartitionNullValueGramMap.dat";//Missing-value (null fill) files.
	public static final String CompletedNullValueGramMapFileName="CompletedNullValueGramMap.dat";
	public static final String PartitionNullValueSetTableFileName="PartitionNullValueSetTable.dat";
	public static final String CompletedNullValueSetTabelFileName="CompletedNullValueSetTable.dat";
	public static final String CompletedValidFilledFileName="CompletedValidFilledData.txt";
	public static final String PartitionCorrectEntityDataByteFileName="PartitionCorrectDataBytes.dat";
	public static final String CompletedCorrectEntityDataByteFileName="CompletedCorrectDataBytes.dat";
	
	public static final String FinalOutPutFileName="FinalData.txt";
	/*
	 * Step 1 of data cleaning: null-value detection, invalid-value detection and handling.
	 * */

	/*
	 * Runs the null/invalid-value detector on every NC in parallel: each NC
	 * scans its local split file, applies the user-defined checks and writes
	 * the validated rows to ValueValidFileName on the same node.
	 * */
	public static void ValueInvalidDetectJob() throws HyracksDataException
	{
		JobSpecification spec = new JobSpecification();
		HyracksJobEx.initNC();
		UserDef[] userDefs = UserDef.GetUserDef();
		final int nodeCount = HyracksJobEx.NC_ID_array.length;

		// One UTF8 serializer and parser per input column.
		ISerializerDeserializer[] serdes = new ISerializerDeserializer[DataInteDis.Col_Number];
		IValueParserFactory[] parsers = new IValueParserFactory[DataInteDis.Col_Number];
		for (int c = 0; c < DataInteDis.Col_Number; c++) {
			serdes[c] = UTF8StringSerializerDeserializer.INSTANCE;
			parsers[c] = UTF8StringParserFactory.INSTANCE;
		}
		RecordDescriptor recDesc = new RecordDescriptor(serdes);

		// Scanners: each NC reads its own local copy of the split file.
		FileScanOperatorDescriptor[] scanners = new FileScanOperatorDescriptor[nodeCount];
		for (int n = 0; n < nodeCount; n++) {
			FileSplit localSplit = new FileSplit(HyracksJobEx.NC_ID_array[n],
					new FileReference(new File(DataInteDis.DataDir, DataInteDis.split_filename).getAbsoluteFile()));
			scanners[n] = new FileScanOperatorDescriptor(
					spec,
					new ConstantFileSplitProvider(new FileSplit[] { localSplit }),
					new DelimitedDataTupleParserFactory(parsers, '|'),
					recDesc);
		}
		for (int n = 0; n < nodeCount; n++) {
			PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanners[n], HyracksJobEx.NC_ID_array[n]);
		}

		// Detection operators, pinned node-for-node with the scanners.
		IOperatorDescriptor[] detectors = new ValueInvalidDetectOperatorDescriptor[nodeCount];
		for (int n = 0; n < nodeCount; n++) {
			detectors[n] = new ValueInvalidDetectOperatorDescriptor(spec, serdes, userDefs);
		}
		for (int n = 0; n < nodeCount; n++) {
			PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, detectors[n], HyracksJobEx.NC_ID_array[n]);
		}
		for (int n = 0; n < nodeCount; n++) {
			spec.connect(new OneToOneConnectorDescriptor(spec), scanners[n], 0, detectors[n], 0);
		}

		// Writers: validated rows stay on the node that produced them.
		IOperatorDescriptor[] writers = new IOperatorDescriptor[nodeCount];
		for (int n = 0; n < nodeCount; n++) {
			writers[n] = new PlainFileWriterOperatorDescriptor(spec,
					new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(HyracksJobEx.NC_ID_array[n],
							new FileReference(new File(DataInteDis.DataDir, ValueValidFileName))) }), "|");
			PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writers[n], HyracksJobEx.NC_ID_array[n]);
		}
		for (int n = 0; n < nodeCount; n++) {
			spec.connect(new OneToOneConnectorDescriptor(spec), detectors[n], 0, writers[n], 0);
		}
		for (int n = 0; n < nodeCount; n++) {
			spec.addRoot(writers[n]);
		}
		HyracksJobEx.exe_job(spec, "InvalidValueDect");
	}

	
	/*
	 * Like ValueInvalidDetectJob, but the operator additionally builds the
	 * null-value gram map on each NC. Output 0 carries the validated rows
	 * (written as '|'-delimited text); output 1 carries the serialized
	 * partial gram map (written as binary frames).
	 * */
	public static void ValueInvalidDectectNullGramBuildJob()throws HyracksDataException
	{
		JobSpecification spec = new JobSpecification();
		HyracksJobEx.initNC();
		UserDef[] userDefs=UserDef.GetUserDef();
		final int nodeCount=HyracksJobEx.NC_ID_array.length;

		//Every NC scans its local split file.
		FileSplit[] scanSplits=new FileSplit[nodeCount];
		for(int n=0;n<nodeCount;n++)
			scanSplits[n]=new FileSplit(HyracksJobEx.NC_ID_array[n],new FileReference(new File(DataInteDis.DataDir,DataInteDis.split_filename).getAbsoluteFile()));

		ISerializerDeserializer[] serdes=new ISerializerDeserializer[DataInteDis.Col_Number];
		IValueParserFactory[] parsers=new IValueParserFactory[DataInteDis.Col_Number];
		for(int c=0;c<DataInteDis.Col_Number;c++)
		{
			serdes[c]=UTF8StringSerializerDeserializer.INSTANCE;
			parsers[c]=UTF8StringParserFactory.INSTANCE;
		}
		RecordDescriptor recDesc=new RecordDescriptor(serdes);

		FileScanOperatorDescriptor scanner=new FileScanOperatorDescriptor(
				spec,
				new ConstantFileSplitProvider(scanSplits),
				new DelimitedDataTupleParserFactory(parsers, '|'),
				recDesc);
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner,HyracksJobEx.NC_ID_array);

		//Detector + null-gram-map builder, one partition per NC.
		IOperatorDescriptor detectorBuilder=new ValueInvalidDetectNullGramBuildOperatorDescriptor(spec,serdes,userDefs);
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,detectorBuilder, HyracksJobEx.NC_ID_array);

		spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, detectorBuilder, 0);

		//Writer for output 0: the validated rows as text.
		FileSplit[] textSplits=new FileSplit[nodeCount];
		for(int n=0;n<nodeCount;n++)
			textSplits[n]=new FileSplit(HyracksJobEx.NC_ID_array[n],new FileReference(new File(DataInteDis.DataDir,ValueValidFileName)));
		IOperatorDescriptor textWriter = new PlainFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(textSplits), "|");
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,textWriter, HyracksJobEx.NC_ID_array);

		//Writer for output 1: the partial null-value gram map as binary.
		FileSplit[] binarySplits=new FileSplit[nodeCount];
		for(int n=0;n<nodeCount;n++)
			binarySplits[n]=new FileSplit(HyracksJobEx.NC_ID_array[n],new FileReference(new File(DataInteDis.DataDir,PartitionNullValueGramMapFileName)));
		IOperatorDescriptor binaryWriter=new FrameFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(binarySplits));
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryWriter, HyracksJobEx.NC_ID_array);

		spec.connect(new OneToOneConnectorDescriptor(spec), detectorBuilder, 0, textWriter, 0);
		spec.connect(new OneToOneConnectorDescriptor(spec), detectorBuilder, 1, binaryWriter, 0);

		HyracksJobEx.exe_job(spec, "InvalidValueDectAndNullGramMapBuildJob");
	}
	
	/*
	 * Fills null values using the completed null-value set table.
	 * Runs only on NC0, so its parallelism is poor.
	 * */
	public static void NullValueFillJob() throws HyracksDataException
	{
		JobSpecification spec = new JobSpecification();
		HyracksJobEx.initNC();
		UserDef[] userDefs=UserDef.GetUserDef();
		String nc0=HyracksJobEx.NC_ID_array[0];

		FileSplit[] scanSplits=new FileSplit[]{
				new FileSplit(nc0,new FileReference(new File(DataInteDis.DataDir,DataInteDis.SumValid_filename).getAbsoluteFile()))};

		ISerializerDeserializer[] serdes=new ISerializerDeserializer[DataInteDis.Col_Number];
		IValueParserFactory[] parsers=new IValueParserFactory[DataInteDis.Col_Number];
		for(int c=0;c<DataInteDis.Col_Number;c++)
		{
			serdes[c]=UTF8StringSerializerDeserializer.INSTANCE;
			parsers[c]=UTF8StringParserFactory.INSTANCE;
		}
		RecordDescriptor recDesc=new RecordDescriptor(serdes);

		//Scanner: reads the summed valid data locally on NC0.
		FileScanOperatorDescriptor scanner=new FileScanOperatorDescriptor(
				spec,
				new ConstantFileSplitProvider(scanSplits),
				new DelimitedDataTupleParserFactory(parsers, '|'),
				recDesc);
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner,nc0);

		//Filler: consults the completed null-value set table on local disk.
		IOperatorDescriptor filler=new NullValueFillOperatorDescriptor(spec,serdes,userDefs,new File(DataInteDis.DataDir,CompletedNullValueSetTabelFileName).getAbsolutePath());
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,filler, nc0);

		spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, filler, 0);

		//Writer: filled rows go back to a text file on NC0.
		FileSplit[] outSplits=new FileSplit[]{
				new FileSplit(nc0,new FileReference(new File(DataInteDis.DataDir,CompletedValidFilledFileName)))};
		IOperatorDescriptor textWriter = new PlainFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(outSplits), "|");
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,textWriter, nc0);

		spec.connect(new OneToOneConnectorDescriptor(spec), filler, 0, textWriter, 0);
		HyracksJobEx.exe_job(spec, "NullValueFillJob");
	}
	
	
	
	
	
	
	/*
	 * Step 2 of data cleaning (duplicate detection / entity resolution),
	 * part 1: each NC builds its local (1,2)-gram map from the validated data.
	 * Per user-selected key column a Hashtable<String,HashSet<Integer>> maps
	 * each gram to the set of first-column IDs containing that gram.
	 * */
	public static void Build2GramMapJob()
	{
		UDColumInfo[] keyColumns=UDColumInfo.GetUDColumInfo();//user-selected key columns
		JobSpecification spec = new JobSpecification();
		HyracksJobEx.initNC();
		final int nodeCount=HyracksJobEx.NC_ID_array.length;
		final int colCount=DataInteDis.Col_Number;

		//Each NC scans its locally produced validated-data file.
		FileSplit[] scanSplits=new FileSplit[nodeCount];
		for(int n=0;n<nodeCount;n++)
			scanSplits[n]=new FileSplit(HyracksJobEx.NC_ID_array[n],new FileReference(new File(DataInteDis.DataDir,DataCleaner.ValueValidFileName)));

		ISerializerDeserializer[] serdes=new ISerializerDeserializer[colCount];
		IValueParserFactory[] parsers=new IValueParserFactory[colCount];
		for(int c=0;c<colCount;c++)
		{
			serdes[c]=UTF8StringSerializerDeserializer.INSTANCE;
			parsers[c]=UTF8StringParserFactory.INSTANCE;
		}
		RecordDescriptor recDesc=new RecordDescriptor(serdes);

		FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(
				spec,
				new ConstantFileSplitProvider(scanSplits),
				new DelimitedDataTupleParserFactory(parsers, '|'),
				recDesc);
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner,HyracksJobEx.NC_ID_array);

		Build2GramMapOperatorDescriptor builder = new Build2GramMapOperatorDescriptor(spec,serdes,keyColumns,HashMapBytesFileName);
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, builder, HyracksJobEx.NC_ID_array);

		//Each NC writes its partial gram map locally as a binary file.
		FileSplit[] gramMapSplits=new FileSplit[nodeCount];
		for(int n=0;n<nodeCount;n++)
			gramMapSplits[n]=new FileSplit(HyracksJobEx.NC_ID_array[n],new FileReference(new File(DataInteDis.DataDir,HashMapBytesFileName)));
		IOperatorDescriptor writer = new FrameFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(gramMapSplits));
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer,HyracksJobEx.NC_ID_array);

		spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, builder, 0);
		spec.connect(new OneToOneConnectorDescriptor(spec), builder, 0, writer, 0);
		spec.addRoot(writer);

		HyracksJobEx.exe_job(spec, "GramMapBuildJob");
	}

	/*
	 * Copies up to length entries of strs starting at begin into a new array
	 * of exactly length slots; slots past the end of strs remain null.
	 * */
	private static String[] subarray(String[] strs,int begin,int length)
	{
		String[] slice=new String[length];
		int copyCount=Math.min(length,strs.length-begin);
		for(int k=0;k<copyCount;k++)
			slice[k]=strs[begin+k];
		return slice;
	}
	/*
	 * List overload of subarray: copies up to length entries of strs starting
	 * at begin into a new array of exactly length slots; extra slots stay null.
	 * */
	private static String[] subarray(ArrayList<String> strs,int begin,int length)
	{
		String[] slice=new String[length];
		int copyCount=Math.min(length,strs.size()-begin);
		for(int k=0;k<copyCount;k++)
			slice[k]=strs.get(begin+k);
		return slice;
	}
	/*
	 * Returns a new array consisting of str followed by all elements of strs.
	 * */
	private static String[] addStringAhead(String str,String[]strs)
	{
		String[] res=new String[strs.length+1];
		res[0]=str;
		//Bulk copy instead of the element-by-element loop.
		System.arraycopy(strs, 0, res, 1, strs.length);
		return res;
	}
	
	/*
	 * Group-wise merge of the per-NC gram maps. Example with 10 nodes and a
	 * group size of 5: NC0 collects and merges the GramMaps of NC1-NC4 while
	 * NC5 collects and merges those of NC6-NC9; the surviving group leaders
	 * repeat the process until everything is integrated on NC0. Groups run
	 * concurrently, so this is both a collect and an integration step.
	 * */
	public static void GramDataCollectIntegrationJob()
	{
		IIntegrationScanner gramMapMerger=new IIntegrationScanner(){
			@Override
			public IOperatorDescriptor getOperatorDescriptor(JobSpecification spec,String[] FilePaths)
			{
				return new SourceGramMapIntegrationOperatorDescriptor(spec,FilePaths);
			}
		};
		Group_ByteDataCollectIntegrateJob(HyracksJobEx.NC_ID_array,HashMapBytesFileName,gramMapMerger);
	}

	/*
	 * Dispatches the merged GramMap.dat integrated on NC0 to every NC.
	 * After dispatch the complete GramMap file is named HashMapCompletedFileName.
	 * */
	public static void DispatchGramMapJob()
	{
		DispatchBytesDataJob(HashMapBytesFileName,HashMapCompletedFileName,"GramMapCompletedDispatchJob");
	}
	
	/*
	 * Combines GramDataCollectIntegrationJob() and DispatchGramMapJob() in a
	 * single pass using the ring parallel pattern: every node ends up holding
	 * the completed GramMap as HashMapCompletedFileName.
	 * */
	public static void GramDataIntegrationDispatchJob()
	{
		IIntegrationScanner gramMapMerger=new IIntegrationScanner(){
			@Override
			public IOperatorDescriptor getOperatorDescriptor(JobSpecification spec,String[] FilePaths)
			{
				return new SourceGramMapIntegrationOperatorDescriptor(spec,FilePaths);
			}
		};
		Loop_ByteDataIntegrationDispatchJob(HyracksJobEx.NC_ID_array,HashMapBytesFileName,gramMapMerger,HashMapCompletedFileName);
	}

	
	/*
	 * Ring-integrates and dispatches the per-NC null-value gram maps,
	 * producing CompletedNullValueGramMapFileName on every node.
	 * */
	public static void NullGramMapIntegrationDispatchJob()
	{
		IIntegrationScanner gramMapMerger=new IIntegrationScanner(){
			@Override
			public IOperatorDescriptor getOperatorDescriptor(JobSpecification spec,String[] FilePaths)
			{
				return new SourceGramMapIntegrationOperatorDescriptor(spec,FilePaths);
			}
		};
		Loop_ByteDataIntegrationDispatchJob(HyracksJobEx.NC_ID_array,PartitionNullValueGramMapFileName,gramMapMerger,CompletedNullValueGramMapFileName);
	}
	
	/*
	 * Ring-integrates and dispatches the per-NC null-value set tables,
	 * producing CompletedNullValueSetTabelFileName on every node.
	 * */
	public static void NullSetTableIntegrationDispatchJob()
	{
		IIntegrationScanner setTableMerger=new IIntegrationScanner(){
			@Override
			public IOperatorDescriptor getOperatorDescriptor(JobSpecification spec,String[] FilePaths)
			{
				return new SourceSetTableMergeOperatorDescriptor(spec,FilePaths);
			}
		};
		Loop_ByteDataIntegrationDispatchJob(HyracksJobEx.NC_ID_array,PartitionNullValueSetTableFileName,setTableMerger,CompletedNullValueSetTabelFileName);
	}
	
	
	/*
	 * Abstracted group-merge operation.
	 * Collects the binary file FileName group by group and repeatedly merges
	 * it, until everything is integrated on NC_array[0].
	 * NC_array: the nodes taking part in the integration
	 * FileName: the binary file name present on every node
	 * iis: factory for the operator that merges the collected files
	 * */
	private static void Group_ByteDataCollectIntegrateJob(String NC_array[],String FileName,IIntegrationScanner iis)
	{
		if(NC_array.length<2)
			return;
		
		JobSpecification spec;
	   JobSpecification integrationspec;
		HyracksJobEx.initNC();
		ArrayList<String> CurrentNC=new ArrayList<String>(NC_array.length);//nodes that still hold a binary file to merge
		for(int i=0;i<NC_array.length;i++)
		{
			CurrentNC.add(i,NC_array[i]);
		}
		//Guard: with a group size below 2, fall back to a single collect+merge on NC[0].
		if(GroupNodeNumber<2)
		{
			spec = new JobSpecification();
			integrationspec=new JobSpecification();
			String[]FilePaths=SetByteDataCollect(spec,NC_array,NC_array[0],FileName);
			HyracksJobEx.exe_job(spec, "ByteDataCollectJob");//gather every binary file on NC[0]
			IOperatorDescriptor scanner = iis.getOperatorDescriptor(integrationspec, FilePaths);
			SetByteDataIntegration(integrationspec,NC_array[0],scanner,FileName);
			HyracksJobEx.exe_job(integrationspec, "ByteDataIntegrationJob");
			return;
		}
		//End of initialisation.
		
	   while(CurrentNC.size()>1)
	   {
		   int NodeSum=CurrentNC.size();
		   int groupNum=0;
		   int ex=0;
			spec = new JobSpecification();
			integrationspec=new JobSpecification();
		   
		   //Fewer remaining nodes than one full group: merge them all onto the first node.
		   if(NodeSum<GroupNodeNumber)
		   	{
			   String[]FilePaths=SetByteDataCollect(spec,subarray(CurrentNC,0,CurrentNC.size()),CurrentNC.get(0),FileName);
			   IOperatorDescriptor scanner =iis.getOperatorDescriptor(integrationspec, FilePaths); 	   
			   SetByteDataIntegration(integrationspec,CurrentNC.get(0),scanner,FileName);
			   String nc0=CurrentNC.get(0);
			   CurrentNC=new ArrayList<String>();
			   CurrentNC.add(nc0);
		    }
		   else
		    {
			   groupNum=NodeSum/GroupNodeNumber;//number of complete groups
			   ex=NodeSum%GroupNodeNumber;//remainder; if ex>(GroupNodeNumber/2) it forms an extra group
			   int i=0;
			   for(;i<groupNum-1;i++)//handle the first groupNum-1 complete groups
			   	{
				   String[] NodeCluster=new String[GroupNodeNumber];
				   for(int k=0;k<GroupNodeNumber;k++)
				    {
					   NodeCluster[k]=CurrentNC.get(i*GroupNodeNumber+k);
				    }
				   String[]FilePaths=SetByteDataCollect(spec,NodeCluster,CurrentNC.get(i*GroupNodeNumber),FileName);
				   IOperatorDescriptor scanner =iis.getOperatorDescriptor(integrationspec, FilePaths);
				   SetByteDataIntegration(integrationspec,CurrentNC.get(i*GroupNodeNumber),scanner,FileName);
				   
			   	}
			   //The last stretch is handled differently depending on the remainder size.
			   if(ex>GroupNodeNumber/2)//the remainder is large enough to form its own group
			    {
				   String[] NodeCluster=new String[GroupNodeNumber];
				   for(int k=0;k<GroupNodeNumber;k++)
				    {
					   NodeCluster[k]=CurrentNC.get(i*GroupNodeNumber+k);
				    }
				   String[] FilePaths=SetByteDataCollect(spec,NodeCluster,CurrentNC.get(i*GroupNodeNumber),FileName);	
				   IOperatorDescriptor scanner = iis.getOperatorDescriptor(integrationspec, FilePaths);
				   SetByteDataIntegration(integrationspec,CurrentNC.get(i*GroupNodeNumber),scanner,FileName);
				   //Merge the remaining ex nodes as their own group.
				   i++;
				   String[] RestNC=new String[ex];
				   for(int k=0;k<ex;k++)
				    {
					   RestNC[k]=CurrentNC.get(i*GroupNodeNumber+k);
				    }
				   //BUGFIX: destination used to be CurrentNC.get(i+GroupNodeNumber) ('+'
				   //instead of '*'), which did not match the node the integration below
				   //runs on, so the merge operator read files that were never collected.
				   FilePaths=SetByteDataCollect(spec,RestNC,CurrentNC.get(i*GroupNodeNumber),FileName);
				   scanner =iis.getOperatorDescriptor(integrationspec, FilePaths); 
				   SetByteDataIntegration(integrationspec,CurrentNC.get(i*GroupNodeNumber),scanner,FileName);
				
			    }
			   else//fold the small remainder into the last complete group
			    {
				  String[] NodeCluster=new String[ex+GroupNodeNumber];
				  for(int k=0;k<ex+GroupNodeNumber;k++)
				   {
					  NodeCluster[k]=CurrentNC.get(i*GroupNodeNumber+k);
				   }
				  //BUGFIX: destination used to be taken from NC_array, which no longer
				  //matches CurrentNC after the first merge round; use CurrentNC so the
				  //collect destination equals the node the integration runs on.
				  String[]FilePaths=SetByteDataCollect(spec,NodeCluster,CurrentNC.get(i*GroupNodeNumber),FileName);
				  IOperatorDescriptor scanner =iis.getOperatorDescriptor(integrationspec, FilePaths);
				  SetByteDataIntegration(integrationspec,CurrentNC.get(i*GroupNodeNumber),scanner,FileName);
 
			    }
			   
			   //Shrink CurrentNC to the group leaders only.
			   ArrayList<String> temp=CurrentNC;
			   CurrentNC=new ArrayList<String>();
			   for(int k=0;k<=i;k++)
			    {
				   CurrentNC.add(k, temp.get(k*GroupNodeNumber));
			    }
			    
		    }
	       HyracksJobEx.exe_job(spec, "ByteDataCollectJob");//run the collect phase
	       HyracksJobEx.exe_job(integrationspec, "ByteDataIntegrationJob");//then merge on each group leader
	   }
	}
/*
 * Wires one integration step into spec on node NC:
 * Scanner (the merging operator) -> FrameFileWriter producing FileName on NC.
 * */	
   private static void SetByteDataIntegration(JobSpecification spec,String NC,IOperatorDescriptor Scanner,String FileName)
   {
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, Scanner, NC);

		//Single output split: the merged file lands next to the inputs on NC.
		FileSplit mergedOut=new FileSplit(NC,new FileReference(new File(DataInteDis.DataDir,FileName)));
		IFileSplitProvider outProvider=new ConstantFileSplitProvider(new FileSplit[]{mergedOut});
		IOperatorDescriptor writer=new FrameFileWriterOperatorDescriptor(spec, outProvider);
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, NC);
		spec.connect(new OneToOneConnectorDescriptor(spec), Scanner, 0, writer, 0);
   }
/*
 * Ships the binary file FileName from every node in NodeCluster to DesNC.
 * On DesNC each incoming copy is stored as <source NC name>+FileName, so this
 * job can be chained for multi-step merging.
 * NodeCluster: source NC names; DesNC: destination NC name.
 * Returns the absolute paths of the files created on DesNC.
 * */
	private static String[] SetByteDataCollect(JobSpecification spec,String[] NodeCluster,String DesNC,String FileName)
	{
		final int memberCount=NodeCluster.length;
		String[] collectedPaths=new String[memberCount];
		for(int n=0;n<memberCount;n++)
			collectedPaths[n]=new File(DataInteDis.DataDir,NodeCluster[n]+FileName).getAbsolutePath();

		//Raw-byte record layout shared by all scanners.
		ISerializerDeserializer[] byteSerde=new ISerializerDeserializer[]{ByteSerializerDeserializer.INSTANCE};
		RecordDescriptor byteDesc=new RecordDescriptor(byteSerde);

		//One single-split scanner per source node, reading its local FileName.
		IOperatorDescriptor[] readers=new IOperatorDescriptor[memberCount];
		for(int n=0;n<memberCount;n++)
		{
			FileSplit localIn=new FileSplit(NodeCluster[n],new FileReference(new File(DataInteDis.DataDir,FileName)));
			readers[n]=new FileScanOperatorDescriptor(
					spec,
					new ConstantFileSplitProvider(new FileSplit[]{localIn}),
					new ByteTupleParserFactory(),
					byteDesc);
			PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, readers[n],NodeCluster[n]);
		}

		//One writer per source, all pinned to DesNC, each producing <srcNC>+FileName there.
		IOperatorDescriptor[] writers=new IOperatorDescriptor[memberCount];
		for(int n=0;n<memberCount;n++)
		{
			FileSplit destOut=new FileSplit(DesNC,new FileReference(new File(collectedPaths[n])));
			writers[n]=new FrameFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(new FileSplit[]{destOut}));
			PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writers[n],DesNC);
		}

		for(int n=0;n<memberCount;n++)
			spec.connect(new OneToOneConnectorDescriptor(spec), readers[n], 0, writers[n], 0);
		return collectedPaths;
	}

	
	
	/*
	 * Ring-shaped integration + dispatch: nc1 -> nc2 -> nc3 -> nc1.
	 * Integration and dispatch happen simultaneously, which maximises parallel
	 * efficiency. With N nodes there are N transfer rounds moving one file each
	 * (the last round only restores uniform file names); afterwards every node
	 * merges all collected files locally via the iis-supplied operator.
	 * Returns the absolute paths of the per-node partial files.
	 * */
	private static String[] Loop_ByteDataIntegrationDispatchJob(String NC_array[],String BaseName,IIntegrationScanner iis,String IntegratedFileName)
	{
		if(NC_array.length<2)
			return new String[]{BaseName};		
		final int ringSize=NC_array.length;
		HyracksJobEx.initNC();
		//The final round exists only to unify file names; see the integration part below.
		for(int round=0;round<ringSize;round++)
		{
			JobSpecification spec=new JobSpecification();
			ISerializerDeserializer[] byteSerde=new ISerializerDeserializer[]{ByteSerializerDeserializer.INSTANCE};
			RecordDescriptor byteDesc=new RecordDescriptor(byteSerde);

			//Each node scans the file it received in the previous round.
			IOperatorDescriptor[] readers=new IOperatorDescriptor[ringSize];
			for(int n=0;n<ringSize;n++)
			{
				String inName=GetLoopScanFileName(round,n,NC_array,BaseName);
				FileSplit inSplit=new FileSplit(NC_array[n],new FileReference(new File(DataInteDis.DataDir,inName)));
				readers[n]=new FileScanOperatorDescriptor(
						spec,
						new ConstantFileSplitProvider(new FileSplit[]{inSplit}),
						new ByteTupleParserFactory(),
						byteDesc);
				PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, readers[n],NC_array[n]);
			}

			//Each node stores the incoming file under its originating node's name.
			IOperatorDescriptor[] writers=new IOperatorDescriptor[ringSize];
			for(int n=0;n<ringSize;n++)
			{
				String outName=GetLoopPrintFileName(round,n,NC_array,BaseName);
				String outPath=new File(DataInteDis.DataDir,outName).getAbsolutePath();
				FileSplit outSplit=new FileSplit(NC_array[n],new FileReference(new File(outPath)));
				writers[n]=new FrameFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(new FileSplit[]{outSplit}));
				PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writers[n],NC_array[n]);
			}

			//Close the ring: node n sends to node n+1, wrapping around to 0.
			for(int n=0;n<ringSize;n++)
			{
				int next=(n+1)%ringSize;
				spec.connect(new OneToOneConnectorDescriptor(spec), readers[n], 0, writers[next], 0);
			}
			HyracksJobEx.exe_job(spec, "LoopDispatchJob");		
		}

		//After the rounds every node holds one <srcNC>+BaseName file per node.
		String[] FilePaths=new String[ringSize];
		for(int n=0;n<ringSize;n++)
			FilePaths[n]=new File(DataInteDis.DataDir,NC_array[n]+BaseName).getAbsolutePath();

		if(iis==null)
			return FilePaths;

		//Merge phase: every node integrates all partial files into IntegratedFileName.
		JobSpecification integrationspec=new JobSpecification();
		for(int n=0;n<ringSize;n++)
		{
			IOperatorDescriptor merger=iis.getOperatorDescriptor(integrationspec, FilePaths);
			PartitionConstraintHelper.addAbsoluteLocationConstraint(integrationspec,merger,NC_array[n]);

			FileSplit mergedSplit=new FileSplit(NC_array[n],new FileReference(new File(DataInteDis.DataDir,IntegratedFileName)));
			IOperatorDescriptor writer = new FrameFileWriterOperatorDescriptor(integrationspec, new ConstantFileSplitProvider(new FileSplit[]{mergedSplit}));
			PartitionConstraintHelper.addAbsoluteLocationConstraint(integrationspec, writer,NC_array[n]);

			integrationspec.connect(new OneToOneConnectorDescriptor(integrationspec), merger, 0, writer, 0);
	    }
		HyracksJobEx.exe_job(integrationspec, "LoopIntegrationJob");	
		return FilePaths;
	}
	/*
	 * loop: 0-based ring-pass number; nodeid: this node's index in NC_array.
	 * Returns the local file this node forwards in the given pass: pass 0
	 * sends the node's own FileName, later passes forward the copy that
	 * originated `loop` positions back around the ring.
	 * */
	private static String GetLoopScanFileName(int loop,int nodeid,String NC_array[],String FileName)
	{
		if(loop==0)
			return FileName;
		int n=NC_array.length;
		int origin=((nodeid-loop)%n+n)%n;//wrap the ring index into [0,n)
		return NC_array[origin]+FileName;
	}
	/*
	 * Returns the name under which this node stores the file received in the
	 * given pass: the file originated loop+1 positions back around the ring.
	 * */
	private static String GetLoopPrintFileName(int loop,int nodeid,String NC_array[],String FileName)
	{
		int n=NC_array.length;
		int origin=((nodeid-loop-1)%n+n)%n;//wrap the ring index into [0,n)
		return NC_array[origin]+FileName;
	}
	
	

	/*
	 * Reads fromFileName on NC_ID_array[0], splits the byte stream once per
	 * node and writes an identical copy named toFileName on every NC.
	 * */
	public static void DispatchBytesDataJob(String fromFileName,String toFileName,String JobName)
	{
		HyracksJobEx.initNC();
		final int fanOut = HyracksJobEx.NC_ID_array.length;
		JobSpecification spec = new JobSpecification();

		//Source scanner: reads the raw bytes of fromFileName on NC[0].
		IFileSplitProvider sourceProvider = new ConstantFileSplitProvider(new FileSplit[] {
				new FileSplit(HyracksJobEx.NC_ID_array[0], new FileReference(new File(DataInteDis.DataDir,fromFileName))) });
		RecordDescriptor byteDesc = new RecordDescriptor(new ISerializerDeserializer[]{ByteSerializerDeserializer.INSTANCE});
		FileScanOperatorDescriptor reader = new FileScanOperatorDescriptor(
				spec,
				sourceProvider,
				new ByteTupleParserFactory(),
				byteDesc);
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, reader, HyracksJobEx.NC_ID_array[0]);

		//Split operator replicates the stream once per target node.
		SplitOperatorDescriptor splitOp = new SplitOperatorDescriptor(spec, byteDesc, fanOut);
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, splitOp, HyracksJobEx.NC_ID_array[0]);

		//One writer per node, producing toFileName locally.
		IOperatorDescriptor[] writers = new IOperatorDescriptor[fanOut];
		for (int i = 0; i < fanOut; i++) {
			File target = new File(DataInteDis.DataDir,toFileName);
			writers[i] = new FrameFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(new FileSplit[] {
					new FileSplit(HyracksJobEx.NC_ID_array[i], target.getAbsolutePath()) }));
			PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writers[i], HyracksJobEx.NC_ID_array[i]);
		}

		spec.connect(new OneToOneConnectorDescriptor(spec), reader, 0, splitOp, 0);
		for (int i = 0; i < fanOut; i++) {
			spec.connect(new OneToOneConnectorDescriptor(spec), splitOp, i, writers[i], 0);
		}
		for (int i = 0; i < fanOut; i++) {
			spec.addRoot(writers[i]);
		}
		HyracksJobEx.exe_job(spec, JobName);
	}
	
	
	/*
	 * Data-cleaning step 2: duplicate-record detection / entity recognition:
	 *     3. Read the complete GramMap and build the related sets from the input.
	 * Similarity = string similarity x assigned weight + string similarity x assigned weight + ...
	 * String similarity = max{ matching 2-grams / total 2-grams, identical characters / total characters }
	 * Records whose similarity exceeds the user-defined threshold are considered
	 * similar and placed into the same group.
	 * */
	/**
	 * Entity-recognition job: every NC scans its local value-validated file,
	 * computes record similarities against the completed gram map, and writes
	 * two byte outputs per node — the set list and the set table.
	 */
	public static void BuildRelatedSetJob()
	{
		UDColumInfo[] udci_array=UDColumInfo.GetUDColumInfo();//fetch the user's key-column settings
		JobSpecification spec = new JobSpecification();
		HyracksJobEx.initNC();
		final int NodeNumber=HyracksJobEx.NC_ID_array.length;
		final int Col_Number=DataInteDis.Col_Number;
		// One input split per NC: each node scans its own local copy.
		FileSplit[] inputFiles=new FileSplit[NodeNumber];
		for(int i=0;i<NodeNumber;i++)
			inputFiles[i]=new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,DataCleaner.ValueValidFileName)));
		
		IFileSplitProvider splitProvider = new ConstantFileSplitProvider(inputFiles);
       ISerializerDeserializer[] isd=new ISerializerDeserializer[Col_Number];
       for(int i=0;i<Col_Number;i++)
           isd[i]=UTF8StringSerializerDeserializer.INSTANCE;
       RecordDescriptor desc = new RecordDescriptor(isd);
       IValueParserFactory[] ipf=new IValueParserFactory[Col_Number];
        for(int i=0;i<Col_Number;i++)
        	 ipf[i]=UTF8StringParserFactory.INSTANCE;
        // Records are '|'-delimited UTF-8 strings, one parser per column.
        FileScanOperatorDescriptor Scanner = new FileScanOperatorDescriptor(
                spec,
                splitProvider,
                new DelimitedDataTupleParserFactory(ipf, '|'),
                desc);
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, Scanner,HyracksJobEx.NC_ID_array);
        
               
        // Similarity computation runs on every NC against the completed gram map.
        IOperatorDescriptor builder = new SimilarityComputeOperatorDescriptor(spec,isd,udci_array,new File(DataInteDis.DataDir,HashMapCompletedFileName).getAbsolutePath());
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, builder, HyracksJobEx.NC_ID_array);

        
      
        // First output: the per-node hash-set list, as bytes.
        FileSplit[] outputFiles=new FileSplit[NodeNumber];
        for(int i=0;i<NodeNumber;i++)
        	outputFiles[i]=new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,HashSetBytesFileName)));
        
        IFileSplitProvider outSplits = new ConstantFileSplitProvider(outputFiles);
        IOperatorDescriptor SetListprinter = new FrameFileWriterOperatorDescriptor(spec, outSplits);
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, SetListprinter,HyracksJobEx.NC_ID_array);
       
        // Second output: the per-node set table; the split variables are reused.
        outputFiles=new FileSplit[NodeNumber];
        for(int i=0;i<NodeNumber;i++)
        	outputFiles[i]=new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,SetTableBytesFileName)));
        
        outSplits = new ConstantFileSplitProvider(outputFiles);
        IOperatorDescriptor SetTableprinter = new FrameFileWriterOperatorDescriptor(spec, outSplits);
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, SetTableprinter,HyracksJobEx.NC_ID_array);
        
        
        
        
        spec.connect(new OneToOneConnectorDescriptor(spec), Scanner, 0, builder, 0);
        spec.connect(new OneToOneConnectorDescriptor(spec), builder, 0, SetListprinter, 0);//output 0 produces the SetList
        spec.connect(new OneToOneConnectorDescriptor(spec), builder, 1, SetTableprinter, 0);//output 1 produces the SetTable

        HyracksJobEx.exe_job(spec, "BuildRelatedSetJob");
	}
	
	
	
	/**
	 * Null-value identification job: every NC scans its local value-validated
	 * records, computes similarities against the completed null-value gram map,
	 * and writes its partial null-value set table as a byte file.
	 */
	public static void SimilarityDataIdentificationJob()
	{
		JobSpecification spec = new JobSpecification();
		HyracksJobEx.initNC();
		UserDef[] ud_array=UserDef.GetUserDef();
		final int NodeNumber=HyracksJobEx.NC_ID_array.length;
		final int Col_Number=DataInteDis.Col_Number;
		// One input split per NC: each node scans its own local copy.
		FileSplit[] inputFiles=new FileSplit[NodeNumber];
		for(int i=0;i<NodeNumber;i++)
			inputFiles[i]=new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,DataCleaner.ValueValidFileName)));
		
		IFileSplitProvider splitProvider = new ConstantFileSplitProvider(inputFiles);
       ISerializerDeserializer[] isd=new ISerializerDeserializer[Col_Number];
       for(int i=0;i<Col_Number;i++)
           isd[i]=UTF8StringSerializerDeserializer.INSTANCE;
       RecordDescriptor desc = new RecordDescriptor(isd);
       IValueParserFactory[] ipf=new IValueParserFactory[Col_Number];
        for(int i=0;i<Col_Number;i++)
        	 ipf[i]=UTF8StringParserFactory.INSTANCE;
        // Records are '|'-delimited UTF-8 strings, one parser per column.
        FileScanOperatorDescriptor Scanner = new FileScanOperatorDescriptor(
                spec,
                splitProvider,
                new DelimitedDataTupleParserFactory(ipf, '|'),
                desc);
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, Scanner,HyracksJobEx.NC_ID_array);
        
               
        // Null-value similarity computation runs on every NC.
        IOperatorDescriptor builder = new NullValueSimilarityComputeOperatorDescriptor(spec,isd,ud_array,new File(DataInteDis.DataDir,CompletedNullValueGramMapFileName).getAbsolutePath());
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, builder, HyracksJobEx.NC_ID_array);

        
      
        FileSplit[] outputFiles=new FileSplit[NodeNumber];
        for(int i=0;i<NodeNumber;i++)
        	outputFiles[i]=new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,PartitionNullValueSetTableFileName)));
        
        IFileSplitProvider outSplits = new ConstantFileSplitProvider(outputFiles);
        IOperatorDescriptor SetTableprinter = new FrameFileWriterOperatorDescriptor(spec, outSplits);
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, SetTableprinter,HyracksJobEx.NC_ID_array);
        
        spec.connect(new OneToOneConnectorDescriptor(spec), Scanner, 0, builder, 0);
        spec.connect(new OneToOneConnectorDescriptor(spec), builder, 0, SetTableprinter, 0);//writes the partial set table (original comment said "SetList" — mislabeled)

        HyracksJobEx.exe_job(spec, "SimilarityDataIdentificationJob");
	}
	
	
	
	
	
	
	/*
	 * Collect the HashSet output of every NC onto NC1 and merge them there.
	 * */
	/*
	 * Gathers the per-NC HashSet byte files onto one node and merges them via
	 * the generic collect/integrate helper.
	 */
	public static void HashSetCollectIntegrationJob()
	{
		// Factory producing the operator that merges the gathered HashSet files.
		IIntegrationScanner mergeFactory = new IIntegrationScanner() {
			@Override
			public IOperatorDescriptor getOperatorDescriptor(JobSpecification spec, String[] FilePaths) {
				return new SourceHashSetIntegrationOperatorDescriptor(spec, FilePaths);
			}
		};
		Group_ByteDataCollectIntegrateJob(HyracksJobEx.NC_ID_array, HashSetBytesFileName, mergeFactory);
	}


	
	
	/*
	 * Integrate and dispatch the SetTable using the ring-parallel pattern.
	 * The merged file feeds the second step of entity recognition.
	 * */
	/*
	 * Merges and redistributes the SetTable byte files with the ring-parallel
	 * helper; the integrated result keeps the SetTable file name.
	 */
	public static void SetTableIntegrationDispatchJob()
	{
		// Factory producing the operator that merges SetTable files per hop.
		IIntegrationScanner mergeFactory = new IIntegrationScanner() {
			@Override
			public IOperatorDescriptor getOperatorDescriptor(JobSpecification spec, String[] FilePaths) {
				return new SourceSetTableMergeOperatorDescriptor(spec, FilePaths);
			}
		};
		Loop_ByteDataIntegrationDispatchJob(HyracksJobEx.NC_ID_array, SetTableBytesFileName,
				mergeFactory, SetTableBytesFileName);//after integration the file is still named SetTable
	}
	
	
	
	/*
	 * From the integrated HashSet: drop sets of size 1, call Algorithm to
	 * assign the remaining set groups to the NCs, then ship each group's
	 * data and its SetList to the owning node.
	 * Runs as two jobs.
	 * */
	/*
	 * Loads the integrated HashSet list, removes singleton sets (they contain
	 * no duplicate pair), partitions the remainder across the NCs, and launches
	 * the two dispatch jobs for the data and the set lists.
	 */
	public static void SplitSetListJob()
	{
		HyracksJobEx.initNC();
		final int NodeNumber = HyracksJobEx.NC_ID_array.length;
		// Read the HashSet list integrated by the collect job.
		ArrayList<HashSet<Integer>> SetList = (ArrayList<HashSet<Integer>>) RWObject.ReadObject(
				new File(DataInteDis.DataDir, DataCleaner.HashSetBytesFileName).getAbsolutePath());
		// Discard every set of size <= 1.
		Iterator<HashSet<Integer>> it = SetList.iterator();
		while (it.hasNext()) {
			if (it.next().size() <= 1) {
				it.remove();
			}
		}
		// Balance the surviving sets over the available nodes.
		ArrayList<HashSet<Integer>>[] SetList_array = Algorithm.OptimizePartition(SetList, NodeNumber);

		DispatchPartitionDataJob(SetList_array);//ship each group's records
		DispatchPartitionSetListJob(SetList_array);//ship each group's set list
	}
	
	/**
	 * Serializes each node's partition of the set list and writes it directly
	 * into that node's local data directory (one byte-writer operator per NC).
	 *
	 * @param SetList_array one set-list partition per NC, index-aligned with
	 *                      {@code HyracksJobEx.NC_ID_array}
	 */
	private static void DispatchPartitionSetListJob(ArrayList<HashSet<Integer>>[] SetList_array)
	{
		HyracksJobEx.initNC();
	   JobSpecification spec = new JobSpecification();
	   
	   final int NodeNumber=HyracksJobEx.NC_ID_array.length;
	   
	   // Same relative path on every node; each NC resolves it locally.
	   File[] outputFile = new File[NodeNumber];
       for (int i = 0; i < NodeNumber; i++) {
           outputFile[i] = new File(DataInteDis.DataDir,PartitionSetListFileName);
        }
       
       IOperatorDescriptor outputOp[] = new IOperatorDescriptor[outputFile.length];
       for (int i = 0; i < NodeNumber; i++) 
         {
          
    	   // Serialize partition i here and ship the bytes to NC i's writer.
    	   byte[] data=ObjectConvert.ObjectToByteArray(SetList_array[i]);
    	   outputOp[i] = new SourceByteWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(HyracksJobEx.NC_ID_array[i],
    			   outputFile[i].getAbsolutePath()) }),data );
           PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, outputOp[i], HyracksJobEx.NC_ID_array[i]);
         }
       HyracksJobEx.exe_job(spec, "SetListSplitJob");
       
	}
	
	/**
	 * Scans the summed valid data on NC1, routes each record to the partition
	 * that owns its set (per {@code SetList_array}), and writes each partition
	 * as a '|'-delimited text file in the owning NC's local data directory.
	 *
	 * @param SetList_array one set-list partition per NC, index-aligned with
	 *                      {@code HyracksJobEx.NC_ID_array}
	 */
	private static void DispatchPartitionDataJob(ArrayList<HashSet<Integer>>[] SetList_array)
	{
		
		HyracksJobEx.initNC();
      JobSpecification spec = new JobSpecification();

      final int NodeNumber=HyracksJobEx.NC_ID_array.length;
		final int Col_Number=DataInteDis.Col_Number;
		
       // Same relative path on every node; each NC resolves it locally.
       File[] outputFile = new File[NodeNumber];
       for (int i = 0; i < NodeNumber; i++) {
           outputFile[i] = new File(DataInteDis.DataDir,PartitionSetDataFileName);
        }
       IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] {
               new FileSplit(HyracksJobEx.NC_ID_array[0], new FileReference(new File(DataInteDis.DataDir,DataInteDis.SumValid_filename))) });//the scan runs on NC1 by default
       ISerializerDeserializer[] isd=new ISerializerDeserializer[Col_Number];
       for(int i=0;i<Col_Number;i++)
           isd[i]=UTF8StringSerializerDeserializer.INSTANCE;
       RecordDescriptor desc = new RecordDescriptor(isd);
       IValueParserFactory[] ipf=new IValueParserFactory[Col_Number];
       for(int i=0;i<Col_Number;i++)
       	 ipf[i]=UTF8StringParserFactory.INSTANCE;
       // Records are '|'-delimited UTF-8 strings, one parser per column.
       FileScanOperatorDescriptor Scanner = new FileScanOperatorDescriptor(
               spec,
               splitProvider,
               new DelimitedDataTupleParserFactory(ipf, '|'),
               desc);

       PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, Scanner, HyracksJobEx.NC_ID_array[0]);

       // Data splitter routes records by set ownership; it runs next to the scan.
       IOperatorDescriptor splitOp = new DataSplitOperatorDescriptor(spec, desc,SetList_array ,NodeNumber);
       PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, splitOp, HyracksJobEx.NC_ID_array[0]);

       // One plain-text writer per NC, pinned to that node.
       IOperatorDescriptor outputOp[] = new IOperatorDescriptor[outputFile.length];
       for (int i = 0; i < NodeNumber; i++) {
           outputOp[i] = new PlainFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(HyracksJobEx.NC_ID_array[i],
                   outputFile[i].getAbsolutePath()) }), "|");
           PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, outputOp[i], HyracksJobEx.NC_ID_array[i]);
         }

       // Wire scanner -> splitter -> per-node writers.
       spec.connect(new OneToOneConnectorDescriptor(spec), Scanner, 0, splitOp, 0);
       for (int i = 0; i < NodeNumber; i++) {
           spec.connect(new OneToOneConnectorDescriptor(spec), splitOp, i, outputOp[i], 0);
         }
       
       HyracksJobEx.exe_job(spec, "SetDataSplitJob");

	}


	/*
	 * Final entity-recognition step:
	 * each NC combines its assigned SetList with the SetTable to produce the
	 * final duplicate-record groups.
	 * Output is an ArrayList<HashSet<Integer>>; each entry holds the IDs of
	 * one group of duplicate records.
	 * */
	/**
	 * Final entity-recognition job: on each NC, match the locally assigned
	 * set-list partition against the integrated set table and write the
	 * resulting duplicate-record ID groups as a byte file.
	 */
	public static void EntityIdentificationJob()
	{
		HyracksJobEx.initNC();
	   JobSpecification spec = new JobSpecification();
	   
	   final int NodeNumber=HyracksJobEx.NC_ID_array.length;
	   final double Threshold=UDColumInfo.GetEntitySetThreshold();
	
       // One matcher per NC, each reading its node-local set list and set table.
       IOperatorDescriptor builder[] = new IOperatorDescriptor[NodeNumber];
       for (int i = 0; i < NodeNumber; i++) 
         {
    	   builder[i] = new SourceMatchOperatorDescriptor(spec,new File(DataInteDis.DataDir, PartitionSetListFileName).getAbsolutePath(),
    			   new File(DataInteDis.DataDir, SetTableBytesFileName).getAbsolutePath(),Threshold);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, builder[i], HyracksJobEx.NC_ID_array[i]);
         }
       

       IOperatorDescriptor printer[]=new IOperatorDescriptor[NodeNumber];
       
       FileSplit[] outputFiles=new FileSplit[NodeNumber];
       for(int i=0;i<NodeNumber;i++)
       	outputFiles[i]=new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,PartitionEntityIDListFileName)));
       
       IFileSplitProvider outSplits = new ConstantFileSplitProvider(outputFiles);
       
       // NOTE(review): all N printers share the same N-split provider although
       // each is a single-partition operator pinned to its own NC; since every
       // split resolves to the same relative path this still writes the same
       // local file name on each node — confirm this sharing is intentional.
       for(int i=0;i< NodeNumber;i++)
         {
    	   	 printer[i]=new FrameFileWriterOperatorDescriptor(spec, outSplits);
    	   	 PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer[i], HyracksJobEx.NC_ID_array[i]);
         }
       for(int i=0;i<NodeNumber;i++)
    	   spec.connect(new OneToOneConnectorDescriptor(spec), builder[i],0,printer[i],0);       
       
       HyracksJobEx.exe_job(spec, "EntityIdentificationJob");
	}
	
	/*
	 * Ring-parallel integration and dispatch of the per-NC partial EntityLists.
	 * */
	/*
	 * Merges the per-NC partial entity ID lists with the ring-parallel helper
	 * and distributes the completed list under its final file name.
	 */
	public static void EntityListIntegrationDispatchJob()
	{
		// Factory producing the operator that merges partial entity ID lists.
		IIntegrationScanner mergeFactory = new IIntegrationScanner() {
			@Override
			public IOperatorDescriptor getOperatorDescriptor(JobSpecification spec, String[] FilePaths) {
				return new SourceSimpleSetListIntegrationOperatorDescriptor(spec, FilePaths);
			}
		};
		Loop_ByteDataIntegrationDispatchJob(HyracksJobEx.NC_ID_array, PartitionEntityIDListFileName,
				mergeFactory, CompletedEntityIDListFIleName);
	}
	
	
	/*
	 * Merges the per-NC corrected entity byte data with the ring-parallel
	 * helper and distributes the completed file under its final name.
	 */
	public static void CorrectByteDataIntegrationDispatchJob()
	{
		// Factory producing the operator that merges corrected-data byte files.
		IIntegrationScanner mergeFactory = new IIntegrationScanner() {
			@Override
			public IOperatorDescriptor getOperatorDescriptor(JobSpecification spec, String[] FilePaths) {
				return new SourceStringListIntegrationOperatorDescriptor(spec, FilePaths);
			}
		};
		Loop_ByteDataIntegrationDispatchJob(HyracksJobEx.NC_ID_array, PartitionCorrectEntityDataByteFileName,
				mergeFactory, CompletedCorrectEntityDataByteFileName);
	}
	
	
	
	
	/*
	 * Inconsistency detection and repair: after entity recognition completes,
	 * detect and fix inconsistencies within each recognized entity group.
	 * */
	/**
	 * Inconsistency detection and repair: every NC scans its partitioned
	 * entity data, fixes inconsistencies within each entity group using the
	 * user-defined rules, and writes the corrected records as '|'-delimited
	 * text.
	 *
	 * @throws HyracksDataException if the job specification cannot be built
	 */
	public static void InconsistencyDectecAndFixJob() throws HyracksDataException
	{

		JobSpecification spec = new JobSpecification();
		HyracksJobEx.initNC();
		UserDef[] ud_array=UserDef.GetUserDef();
		// One input split per NC: each node scans its local partition data.
		FileSplit[] splitdata=new FileSplit[HyracksJobEx.NC_ID_array.length];
		for(int i=0;i<HyracksJobEx.NC_ID_array.length;i++)
		{
			splitdata[i]=new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,PartitionSetDataFileName).getAbsoluteFile()));
		}

		ISerializerDeserializer[] isd=new ISerializerDeserializer[DataInteDis.Col_Number];
		for(int i=0;i<DataInteDis.Col_Number;i++)
		    isd[i]=UTF8StringSerializerDeserializer.INSTANCE;
		RecordDescriptor desc = new RecordDescriptor(isd);
		IValueParserFactory[] ipf=new IValueParserFactory[DataInteDis.Col_Number];
		for(int i=0;i<DataInteDis.Col_Number;i++)
			 ipf[i]=UTF8StringParserFactory.INSTANCE;
		//Define the scanner: '|'-delimited UTF-8 records on every NC.
		FileScanOperatorDescriptor Scanner = new FileScanOperatorDescriptor(
			        spec,
			        new ConstantFileSplitProvider(splitdata),
			        new DelimitedDataTupleParserFactory(ipf, '|'),
			        desc);	
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, Scanner,HyracksJobEx.NC_ID_array);
		
		//Define the fix operator; it reads the node-local entity ID list.
		IOperatorDescriptor fixer=new InconsistencyFixOperatorDescriptor(spec,isd,ud_array,new File(DataInteDis.DataDir,PartitionEntityIDListFileName).getAbsolutePath());
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, fixer, HyracksJobEx.NC_ID_array);
		
		//Connect scanner -> fixer.
		spec.connect(new OneToOneConnectorDescriptor(spec), Scanner, 0,fixer, 0);
		
		//Define the writer for the corrected entity data (text output).
		FileSplit[] outFileSplit=new FileSplit[HyracksJobEx.NC_ID_array.length];
		for(int i=0;i<HyracksJobEx.NC_ID_array.length;i++)
		{
			outFileSplit[i]= new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,PartitionCorrectEntityDataFileName)));
		}
		IOperatorDescriptor printer =  new PlainFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(outFileSplit), "|");
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, HyracksJobEx.NC_ID_array);
		
		//Connect fixer -> writer and register the root.
		spec.connect(new OneToOneConnectorDescriptor(spec), fixer, 0, printer, 0);
		spec.addRoot(printer);
		HyracksJobEx.exe_job(spec, "InconsistencyDectAndFixJob");	
	}
	/**
	 * Byte-output variant of {@code InconsistencyDectecAndFixJob}: every NC
	 * scans its partitioned entity data, fixes inconsistencies per entity
	 * group, and writes the corrected records as frame (byte) files instead
	 * of plain text.
	 *
	 * @throws HyracksDataException if the job specification cannot be built
	 */
	public static void InconsistencyDectecAndFixByteJob() throws HyracksDataException
	{

		JobSpecification spec = new JobSpecification();
		HyracksJobEx.initNC();
		UserDef[] ud_array=UserDef.GetUserDef();
		// One input split per NC: each node scans its local partition data.
		FileSplit[] splitdata=new FileSplit[HyracksJobEx.NC_ID_array.length];
		for(int i=0;i<HyracksJobEx.NC_ID_array.length;i++)
		{
			splitdata[i]=new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,PartitionSetDataFileName).getAbsoluteFile()));
		}

		ISerializerDeserializer[] isd=new ISerializerDeserializer[DataInteDis.Col_Number];
		for(int i=0;i<DataInteDis.Col_Number;i++)
		    isd[i]=UTF8StringSerializerDeserializer.INSTANCE;
		RecordDescriptor desc = new RecordDescriptor(isd);
		IValueParserFactory[] ipf=new IValueParserFactory[DataInteDis.Col_Number];
		for(int i=0;i<DataInteDis.Col_Number;i++)
			 ipf[i]=UTF8StringParserFactory.INSTANCE;
		//Define the scanner: '|'-delimited UTF-8 records on every NC.
		FileScanOperatorDescriptor Scanner = new FileScanOperatorDescriptor(
			        spec,
			        new ConstantFileSplitProvider(splitdata),
			        new DelimitedDataTupleParserFactory(ipf, '|'),
			        desc);	
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, Scanner,HyracksJobEx.NC_ID_array);
		
		//Define the byte-output fix operator; it reads the node-local entity ID list.
		IOperatorDescriptor fixer=new InconsistencyFixByteOperatorDescriptor(spec,isd,ud_array,new File(DataInteDis.DataDir,PartitionEntityIDListFileName).getAbsolutePath());
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, fixer, HyracksJobEx.NC_ID_array);
		
		//Connect scanner -> fixer.
		spec.connect(new OneToOneConnectorDescriptor(spec), Scanner, 0,fixer, 0);
		
		//Define the writer for the corrected entity data (frame/byte output).
		FileSplit[] outFileSplit=new FileSplit[HyracksJobEx.NC_ID_array.length];
		for(int i=0;i<HyracksJobEx.NC_ID_array.length;i++)
		{
			outFileSplit[i]= new FileSplit(HyracksJobEx.NC_ID_array[i],new FileReference(new File(DataInteDis.DataDir,PartitionCorrectEntityDataByteFileName)));
		}
		IOperatorDescriptor printer =  new FrameFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(outFileSplit));
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, HyracksJobEx.NC_ID_array);
		
		//Connect fixer -> writer and register the root.
		spec.connect(new OneToOneConnectorDescriptor(spec), fixer, 0, printer, 0);
		spec.addRoot(printer);
		HyracksJobEx.exe_job(spec, "InconsistencyDectAndFixByteJob");	
	}
	
	/**
	 * Final integration step: on NC1, scan the completed valid/filled data,
	 * merge in the completed entity ID list and the corrected entity data,
	 * and write the final cleaned result as a '|'-delimited text file.
	 *
	 * @throws HyracksDataException if the job specification cannot be built
	 */
	public static void FinalIntegrationJob() throws HyracksDataException
	{
		
		JobSpecification spec = new JobSpecification();
		HyracksJobEx.initNC();
		// Single input split: the completed file lives on NC1 only.
		FileSplit[] splitdata=new FileSplit[1];
		splitdata[0]=new FileSplit(HyracksJobEx.NC_ID_array[0],new FileReference(new File(DataInteDis.DataDir,CompletedValidFilledFileName).getAbsoluteFile()));

		ISerializerDeserializer[] isd=new ISerializerDeserializer[DataInteDis.Col_Number];
		for(int i=0;i<DataInteDis.Col_Number;i++)
		    isd[i]=UTF8StringSerializerDeserializer.INSTANCE;
		RecordDescriptor desc = new RecordDescriptor(isd);
		IValueParserFactory[] ipf=new IValueParserFactory[DataInteDis.Col_Number];
		for(int i=0;i<DataInteDis.Col_Number;i++)
			 ipf[i]=UTF8StringParserFactory.INSTANCE;
		//Define the scanner: '|'-delimited UTF-8 records, pinned to NC1.
		FileScanOperatorDescriptor Scanner=new FileScanOperatorDescriptor(
			        spec,
			        new ConstantFileSplitProvider(splitdata),
			        new DelimitedDataTupleParserFactory(ipf, '|'),
			        desc);	
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, Scanner,HyracksJobEx.NC_ID_array[0]);
		
		//Define the integrator: folds the entity ID list and corrected entity data into the stream.
		IOperatorDescriptor integrator=new FinalIntegrationOperatorDescriptor(spec,isd,
				new File(DataInteDis.DataDir,CompletedEntityIDListFIleName).getAbsolutePath(),
				new File(DataInteDis.DataDir,CompletedCorrectEntityDataByteFileName).getAbsolutePath());
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,integrator, HyracksJobEx.NC_ID_array[0]);
		
		//Connect scanner -> integrator.
		spec.connect(new OneToOneConnectorDescriptor(spec), Scanner, 0, integrator, 0);
		
		//Define the writer: the final cleaned output as plain text on NC1.
		FileSplit[] txtoutFileSplits=new FileSplit[1];
		txtoutFileSplits[0]=new FileSplit(HyracksJobEx.NC_ID_array[0],new FileReference(new File(DataInteDis.DataDir,FinalOutPutFileName)));

		IOperatorDescriptor txtprinter = new PlainFileWriterOperatorDescriptor(spec, new ConstantFileSplitProvider(txtoutFileSplits), "|");
		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,txtprinter, HyracksJobEx.NC_ID_array[0]);
			
		//Connect integrator -> writer and register the root, matching the
		//sibling job builders in this class.
		spec.connect(new OneToOneConnectorDescriptor(spec), integrator, 0, txtprinter, 0);
		spec.addRoot(txtprinter);
		//Bug fix: the job was previously executed under the copy-pasted label
		//"NullValueFillJob"; use the correct name so logs identify this job.
		HyracksJobEx.exe_job(spec, "FinalIntegrationJob");	
		
		
	}
	
	
	/*
	 * Runs a cleanup operator on every NC to remove intermediate state.
	 */
	public static void CleanUpJob()
	{
		HyracksJobEx.initNC();
		JobSpecification cleanupSpec = new JobSpecification();
		IOperatorDescriptor cleaner = new NodeCleanUpOperatorDescriptor(cleanupSpec);
		PartitionConstraintHelper.addAbsoluteLocationConstraint(cleanupSpec, cleaner, HyracksJobEx.NC_ID_array);
		cleanupSpec.addRoot(cleaner);
		HyracksJobEx.exe_job(cleanupSpec, "NodeCleanUpJob");
	}
	
	
}
