package com.bw.test2;

import org.apache.flink.types.Row;

import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.dataproc.SplitBatchOp;
import com.alibaba.alink.operator.batch.source.MemSourceBatchOp;
import org.junit.Test;

import java.util.Arrays;
import java.util.List;

public class SplitBatchOpTest {

    /**
     * Demonstrates {@code SplitBatchOp}: splits a small in-memory dataset into
     * a main output (fraction = 0.8, i.e. ~80% of rows) and a side output
     * (the remaining ~20%), then prints both and their counts.
     *
     * @throws Exception if the Alink/Flink batch job fails to execute
     */
    @Test
    public void testSplitBatchOp() throws Exception {

        // Sample dataset: (state, year, population)
        List<Row> rows = Arrays.asList(
            Row.of("Ohio", 2001, 1.7),
            Row.of("Ohio", 2002, 3.6),
            Row.of("Nevada", 2001, 2.4),
            Row.of("Nevada", 2002, 2.9)
        );

        BatchOperator<?> batchData = new MemSourceBatchOp(rows, "f1 string, f2 int, f3 double");

        // 80/20 split: main output receives the configured fraction (0.8),
        // the side output receives the remainder.
        BatchOperator<?> splitter = new SplitBatchOp().setFraction(0.8);

        splitter.linkFrom(batchData);

        // Main output (train split); lazyPrint(-1) prints all rows at execution time.
        BatchOperator<?> trainData = splitter.lazyPrint(-1);

        // Side output 0 (test split).
        BatchOperator<?> testData = splitter.getSideOutput(0);

        // Trigger the lazy print; note that each count() call below triggers
        // an additional job execution in Alink's batch mode.
        BatchOperator.execute();
        System.out.println("trainData.count() = " + trainData.count());
        System.out.println("TestData = " + testData.count());
    }
}