package com.atguigu.day02

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.api.functions.source.{RichParallelSourceFunction, SourceFunction}

import java.sql.Timestamp
import scala.util.Random


/**
 * Flink streaming job: a custom parallel source emits one random click
 * [[Event]] every 500 ms, and the job prints each event's user name.
 */
object Example2 {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Single task slot so printed output arrives in emission order.
    env.setParallelism(1)

    // Infinite source of synthetic click events (name, url, timestamp-ms).
    val eventDateStream = env.addSource(new RichParallelSourceFunction[Event] {
      private val nameArr = Array("zhang3", "li4", "wang5")
      private val urlArr = Array("baidu.com", "jd.com", "google.com", "/home")
      private val random = new Random
      // @volatile is required: cancel() is invoked by the Flink runtime from a
      // different thread than run(), and without it the JMM allows the busy
      // loop below to cache `flag` and never observe the cancellation.
      @volatile private var flag = true

      override def run(sourceContext: SourceFunction.SourceContext[Event]): Unit = {
        while (flag) {
          sourceContext.collect(new Event(
            nameArr(random.nextInt(nameArr.length)),
            urlArr(random.nextInt(urlArr.length)),
            System.currentTimeMillis()
          ))
          // Throttle to roughly two events per second.
          Thread.sleep(500)
        }
      }

      override def cancel(): Unit = {
        flag = false
      }
    })

    eventDateStream
      .map(_.name)
      .print()

    // Triggers lazy job-graph execution; blocks until the job terminates.
    env.execute()
  }

}

