package com.bw.test4;

import org.apache.flink.types.Row;

import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.evaluation.EvalClusterBatchOp;
import com.alibaba.alink.operator.batch.source.MemSourceBatchOp;
import com.alibaba.alink.operator.common.evaluation.ClusterMetrics;
import org.junit.Test;

import java.util.Arrays;
import java.util.List;

public class EvalClusterBatchOpTest {

	/**
	 * Builds a small, well-separated two-cluster data set (cluster ids 0 and 1)
	 * and prints the clustering-evaluation metrics produced by
	 * {@link EvalClusterBatchOp}.
	 *
	 * @throws Exception if the Flink batch job fails
	 */
	@Test
	public void testEvalClusterBatchOp() throws Exception {
		BatchOperator.setParallelism(1);
		// All vectors use Alink's space-delimited dense-vector string format.
		// (Two rows previously used commas — "0.1,0.1,0.1" — which is
		// inconsistent with the other rows and not the same parse format.)
		List <Row> df = Arrays.asList(
			Row.of(0, "0 0 0"),
			Row.of(0, "0.1 0.1 0.1"),
			Row.of(0, "0.2 0.2 0.2"),
			Row.of(1, "9 9 9"),
			Row.of(1, "9.1 9.1 9.1"),
			Row.of(1, "9.2 9.2 9.2")
		);
		BatchOperator <?> inOp = new MemSourceBatchOp(df, "id int, vec string");

		// Clustering evaluation metrics.
		ClusterMetrics metrics = new EvalClusterBatchOp().setVectorCol("vec").setPredictionCol("id").linkFrom(inOp)
			.collectMetrics();

		System.out.println("Total Samples Number:" + metrics.getCount());
		System.out.println("Cluster Number:" + metrics.getK());
		System.out.println("Cluster Array:" + Arrays.toString(metrics.getClusterArray()));
		System.out.println("Cluster Count Array:" + Arrays.toString(metrics.getCountArray()));
		System.out.println("CP:" + metrics.getCp());
		System.out.println("DB:" + metrics.getDb());
		System.out.println("SP:" + metrics.getSp());
		System.out.println("SSB:" + metrics.getSsb());
		System.out.println("SSW:" + metrics.getSsw());
		System.out.println("CH:" + metrics.getVrc());
		// NOTE(review): ARI is a supervised metric — it presumably returns null
		// here because no label column is configured (no setLabelCol call).
		// TODO: confirm and either set a label column or drop this line.
		System.out.println("ARI:" + metrics.getAri());
	}
}