package com.yeming.flink.mudlog.isolationforest

import com.yeming.flink.practice.BatchWordCount.getClass
import org.apache.flink.api.scala._
import org.apache.flink.util.OutputTag

object IsolationTest {

  /**
   * Reads well-log rows from a bundled LAS file, trains an isolation forest
   * on the complete (non-missing) rows, appends an anomaly score to every
   * training row and writes them to disk, then prints scores for a small
   * test sample of 10 rows.
   *
   * @param args optional; args(0) overrides the output path for the scored
   *             rows (defaults to "file:///E:/tmp/result_label")
   */
  def main(args: Array[String]): Unit = {
    // Allow the output location to be overridden on the command line while
    // keeping the original hard-coded path as the backward-compatible default.
    val outputPath = if (args.nonEmpty) args(0) else "file:///E:/tmp/result_label"

    val env = ExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Resolve the bundled sample file. `this.getClass` is used explicitly so
    // the lookup no longer depends on the imported BatchWordCount.getClass
    // (the leading "/" makes the path absolute, so the result is identical).
    // Fail fast with a clear message instead of an NPE on url.getPath.
    val url = this.getClass.getResource("/data/C1.las")
    require(url != null, "resource /data/C1.las not found on the classpath")

    // Read the whole file; LAS data rows start with two spaces, which
    // filters out the header sections.
    val totalDataSet: DataSet[String] = env.readTextFile(url.getPath)
      .filter(line => line.startsWith("  "))

    // Training data: rows without the LAS missing-value marker -999.250000.
    val sampleDataSet = totalDataSet
      .filter(line => !line.contains("-999.250000"))
      .map(parseFeatures _)

    // Test data: the first 10 data rows (missing values included).
    val testDataSet = totalDataSet.map(parseFeatures _).first(10)

    val forest = IsolationForest.buildForest(sampleDataSet, numTrees = 15)

    // Append the forest's anomaly score to each training row.
    val scoredRows = sampleDataSet.map(row => row ++ Array(forest.predict(row)))

    // Lazy sink; runs when the job is triggered by collect() below.
    scoredRows.map(row => row.mkString(",")).writeAsText(outputPath)

    // NOTE(review): print(sinkIdentifier) is a deprecated lazy sink in the
    // DataSet API; kept as-is for behavioral compatibility.
    testDataSet.print("测试数据")

    // collect() brings the 10 test rows to the driver and triggers program
    // execution (including the sinks defined above); score them locally.
    val localRows = testDataSet.collect()
    for (row <- localRows) {
      println(row.mkString(","))
      println("ForestScore", forest.predict(row))
    }
    println("Finished Isolation")
  }

  /**
   * Parses one whitespace-delimited LAS data row into its feature vector,
   * keeping columns 1-5 (column 0, the depth index, is dropped).
   *
   * @param line a raw data row from the LAS file
   * @return the five feature values as doubles
   */
  private def parseFeatures(line: String): Array[Double] = {
    val values = line.trim.split("\\s+").map(_.toDouble)
    Array(values(1), values(2), values(3), values(4), values(5))
  }

}
