package com.learn.lb.spark.sql.work

import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ArrayBuffer

/**
 * Filter (score) records by node — demo scaffold that builds a synthetic
 * score dataset; the actual filtering logic is not yet implemented.
 *
 * @author laibo
 * @since 2019/8/30 16:29
 */
object FilterScoreSpark {

  /**
   * Entry point: starts a local SparkSession and generates a synthetic
   * dataset of one million+ named scores.
   *
   * NOTE(review): the generated data and the session are never actually
   * used for filtering yet — presumably the node-filter logic is still to
   * be written here.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder()
      .appName("FilterScoreSpark")
      .master("local[3]")
      .getOrCreate()
    try {
      // 0 to 1000000 is INCLUSIVE: 1,000,001 rows, names "z0".."z1000000",
      // each with a random score in [0, 10000). Built immutably instead of
      // appending to a mutable ArrayBuffer in a loop.
      val testData = (0 to 1000000).map(i => TestData("z" + i, Math.random() * 10000))
      // TODO: feed testData into Spark and apply the node/score filter.
    } finally {
      // Always release the session (stops the underlying SparkContext);
      // the original code leaked it, leaving local executor threads alive.
      sparkSession.stop()
    }
  }

  /** Immutable data row: a node name and its score. */
  final case class TestData(name: String, score: Double)

}
