package RDD_homework
import java.io.{File, PrintWriter}
import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.spark.streaming.{Seconds,StreamingContext}


object eight_work {

  /** Entry point: runs the file generator with its default settings (see [[first]]). */
  def main(args: Array[String]): Unit = {
    first()
  }

  /**
   * Simulates a streaming text source: repeatedly writes small files, each
   * containing four randomly chosen sentences, pausing a random interval
   * before each file. Intended to feed a Spark Streaming job that watches
   * `outDir` for new files.
   *
   * Note: file names are `out<0..999>.txt`, so later files may overwrite
   * earlier ones — this matches the original behavior.
   *
   * @param outDir     directory the generated files are written into
   *                   (default preserves the original hard-coded path)
   * @param numFiles   how many files to generate before returning
   * @param maxSleepMs exclusive upper bound of the random pause (ms) before
   *                   each file; must be positive
   */
  def first(outDir: String = "E:\\spark\\spark_start\\data",
            numFiles: Int = 100000,
            maxSleepMs: Int = 5000): Unit = {
    val strList = List(
      "There are three famous bigdata softwares",
      "and they are widely used in real applications",
      "For in that sleep of death what dreams may come",
      "The slings and arrows of outrageous fortune",
      "When we have shuffled off this mortal coil",
      "For who would bear the whips and scorns of time",
      "That patient merit of the unworthy takes",
      "When he himself might his quietus make",
      "To grunt and sweat under a weary life",
      "But that the dread of something after death",
      "And makes us rather bear those ills we have",
      "Than fly to others that we know not of",
      "Thus conscience does make cowards of us all",
      "And thus the native hue of resolution",
      "And enterprises of great pith and moment",
      "And lose the name of action"
    )
    val rng = scala.util.Random
    for (_ <- 0 until numFiles) {
      // Random pause simulates an irregular arrival rate for the stream.
      Thread.sleep(rng.nextInt(maxSleepMs))
      val fileName = s"out${rng.nextInt(1000)}.txt"
      val out = new PrintWriter(new File(outDir, fileName))
      // try/finally guarantees the writer is closed even if a write fails
      // (the original leaked the handle on exception).
      try {
        // BUG FIX: the original sampled with nextInt(14), which could never
        // select the last two sentences of the 16-element list.
        for (_ <- 0 to 3) out.println(strList(rng.nextInt(strList.length)))
      } finally {
        out.close()
      }
    }
  }
}
