package com.fudan.run.trans;

import java.util.List;
import java.util.Map;

import org.apache.spark.api.java.JavaPairRDD;

import com.fudan.cfg.trans.FlatRpcTransDef;
import com.fudan.cfg.trans.MergeNodeDef;
import com.fudan.run.JobRunner;
import com.fudan.run.VariableRddMap;
import com.fudan.run.ctx.annotation.TransAdapter;

import scala.Tuple2;

@TransAdapter("merge_node")
public class MergeNodeTrans extends TransHandler<MergeNodeDef>{

	@Override
	public void trans(JobRunner jobRunner, MergeNodeDef transNode, VariableRddMap variableMap) {
		List<String> targets = transNode.getTargets();
		JavaPairRDD<Tuple2<String, Map<String, Object>>, Tuple2<Iterable<Map<String, Object>>, Iterable<Map<String, Object>>>> tempRdd = null;
		for(int i = 1;i<targets.size();i++) {
			String pre = targets.get(i-1);
			String tar = targets.get(i);
			if(tempRdd == null) {
				tempRdd = variableMap.getRdd(pre).groupBy(record->{
					String root = String.valueOf(record.get(transNode.getRootKey()));
					return Tuple2.apply(root, record);
				})
						.filter(l->{return !l._1._1.equals("null");})
						.join(variableMap.getRdd(tar).groupBy(record->{
							String root = String.valueOf(record.get(transNode.getRootKey()));
							return Tuple2.apply(root, record);
						})
								.filter(l->{return !l._1._1.equals("null");}));
			}else {
				//tempRdd.join()
			}
		}
	}

}
