package com.qing.write

import java.io.{File, PrintWriter}

import org.apache.spark.{SparkConf, SparkContext}

import scala.io.Source

/**
  * Created by wuliao on 2017/8/25.
  */
/**
  * Minimal smoke-test entry point. Prints a fixed string with a "...." suffix;
  * a previous Spark-cluster experiment is kept below as commented-out reference.
  */
object Test {

  /**
    * Appends the literal "...." suffix to `s`.
    *
    * A `null` input yields just "...." instead of throwing — unlike the former
    * `Option(s).get`, which would raise NoSuchElementException on null.
    *
    * @param s possibly-null input string
    * @return `s` followed by "....", or "...." when `s` is null
    */
  def decorate(s: String): String =
    Option(s).fold("....")(v => s"$v....")

  def main(args: Array[String]): Unit = {

    // NOTE(review): earlier Spark job submission kept for reference only.
    //    System.setProperty("user.name", "root")
    //    val conf = new SparkConf()
    //      .setAppName("hello")
    //      //      .setJars(Array("file:///mnt/disk/jar/spark-lucene-1.0-SNAPSHOT-jar-with-dependencies.jar"))
    //      //      .setJars(Array("file:///Users/wuliao/IdeaProjects/sparklucene/target/spark-lucene-1.0-SNAPSHOT-jar-with-dependencies.jar"))
    //      //      .setMaster("local")
    //      .set("SPARK_EXECUTOR_CORES","1")
    //      .setMaster("spark://175.102.18.112:7077")
    //    val sc = new SparkContext(conf)
    //    println(sc.parallelize(Seq("...asdasdasd asdasd asfsdklfjslfkd")).count())
    val s = "ssss"
    // Was `Option(s).get + "...."` — `.get` on an Option is unsafe; decorate
    // produces the identical output ("ssss....") without the partial call.
    println(decorate(s))

  }
}
