package com.tl.spark.scala

import org.apache.spark.internal.Logging
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.{Logger, LoggerFactory}

/**
  * @program: spark-test
  * @description:
  * @author: dong.tl
  * @create: 2018-09-25 10:00
  **/
object ERDF extends Logging {

  /**
    * Entry point: prints the `SPARK_LOCAL_IP` environment variable and sets up
    * an SLF4J logger. The Spark Pi estimation experiment below is currently
    * disabled (commented out) but kept for reference.
    *
    * @param args command-line arguments (unused by the active code; the
    *             commented-out experiment read `args(0)` as a slice count)
    */
  def main(args: Array[String]): Unit = {

    // System.getenv returns null when the variable is unset; wrap in Option
    // to make that explicit. getOrElse(null) keeps the printed output
    // identical to the original behavior ("null" when absent).
    val ip: Option[String] = Option(System.getenv("SPARK_LOCAL_IP"))

    println(ip.getOrElse(null))

    // Inside the object, plain getClass already refers to this object's
    // runtime class — no need to name ERDF explicitly. The logger is unused
    // by the active code but referenced by the commented-out experiment.
    val logger: Logger = LoggerFactory.getLogger(getClass)
//    val conf = new SparkConf().setAppName("Spark Pi").setMaster("spark://db01:7077")
//      .setJars(Seq("F:\\IdeaProjects\\spark-test\\target\\spark-test-1.0-SNAPSHOT.jar"))
//    val spark = new SparkContext(conf)
//    val slices = if (args.length > 0) args(0).toInt else 4
//    val n = 10000 * slices
//    val count = spark.parallelize(1 to n, slices).map { i =>
//      val x = Math.random * 2 - 1
//      val y = Math.random * 2 - 1
//      println("--------------------------------------"+x+":"+y)
//      logger.info("--------------------------------------"+x+":"+y)
//      logInfo("---------123-----------------------------"+x+":"+y)
//      if (x * x + y * y < 1) 1 else 0
//    }.reduce(_ + _)
//    logInfo("----------------------------------------------------------")
//    logger.error("====================123123========================")
//    println("Pi is roughly " + 4.0 * count / n)
//    spark.stop()
  }
}
