package com.ml4ai.spark.boot

import org.apache.spark.sql.SparkSession
import org.apache.spark.SparkConf
import org.reflections.Reflections

import scala.collection.JavaConversions._

object Boot {

  // Windows workaround: Hadoop looks for winutils.exe under hadoop.home.dir.
  // NOTE(review): hardcoded local path — consider reading this from an
  // environment variable or setting it only when actually running on Windows.
  System.setProperty("hadoop.home.dir", "E:\\soft\\hadoo2.7.3-dll-exe-pdb")

  /**
   * Entry point: discovers and instantiates every `Runnable` under the
   * `com.ml4ai` package, then runs a small local-mode Spark smoke test that
   * turns an RDD of strings into a single-column DataFrame.
   */
  def main(args: Array[String]): Unit = {
    // Explicit .asScala decorators instead of the deprecated implicit
    // JavaConversions wildcard conversions.
    import scala.collection.JavaConverters._

    // Classpath scan: find all Runnable implementations and instantiate each
    // via its no-arg constructor (Class#newInstance is deprecated since Java 9).
    val reflections = new Reflections("com.ml4ai")
    val runnables = reflections
      .getSubTypesOf(classOf[Runnable])
      .asScala
      .map(_.getDeclaredConstructor().newInstance())
    runnables.foreach(println)

    // Local-mode Spark with one worker thread per logical core.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("ml4ai-boot") // was "local[*]" — copy-paste of the master URL

    val session = SparkSession.builder().config(conf).getOrCreate()
    try {
      println(session.sqlContext)
      println(session.sparkContext)

      // Smoke test: small RDD of strings -> one-column DataFrame.
      val rdd = session.sparkContext.makeRDD(Seq("1", "2", "3", "4", "5"))
      import session.implicits._
      rdd.toDF("features").show()
    } finally {
      // Always release local Spark resources, even if the demo job fails.
      session.stop()
    }
  }

}

