package com.bw.test2;

import com.alibaba.alink.common.io.filesystem.FilePath;
import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.dataproc.SplitBatchOp;
import com.alibaba.alink.operator.batch.sink.AkSinkBatchOp;
import com.alibaba.alink.operator.batch.sink.CsvSinkBatchOp;
import com.alibaba.alink.operator.batch.source.AkSourceBatchOp;
import com.alibaba.alink.operator.batch.source.CsvSourceBatchOp;
import com.alibaba.alink.operator.batch.source.MemSourceBatchOp;
import org.junit.Test;

public class CsvSourceBatchOpTest {

	/**
	 * Reads the iris data set from a local CSV file and writes it back out to
	 * another CSV file.
	 *
	 * <p>Fix: the original version linked the source into the sink but never
	 * called {@link BatchOperator#execute()}. Alink sinks are lazy — linking
	 * only builds the pipeline — so the job never ran and no output file was
	 * produced. The {@code execute()} call below triggers the batch job.
	 *
	 * @throws Exception if reading the input file or running the batch job fails
	 */
	@Test
	public void testCsvSourceBatchOp() throws Exception {
		// Import: local CSV file with an explicit column schema.
		String filePath = "data/iris.csv";
		String schema
			= "sepal_length double, sepal_width double, petal_length double, petal_width double, category string";
		CsvSourceBatchOp csvSource = new CsvSourceBatchOp()
			.setFilePath(filePath)
			.setSchemaStr(schema)
			.setFieldDelimiter(",");

		// Export: write the rows back out as CSV, overwriting any previous output.
		CsvSinkBatchOp csvSink = new CsvSinkBatchOp()
				.setFilePath("data/csv_test.csv")
				.setOverwriteSink(true);
		csvSource.link(csvSink);

		// Sinks only run when the job is explicitly executed.
		BatchOperator.execute();
	}
}