package com.chenjj.bigdata.flink.book


import org.apache.flink.api.scala.{DataSet, ExecutionEnvironment, _}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.junit.{Assert, Test}


/**
  * Chapter 2: environment preparation.
  *
  * Demonstrates word counting with both Flink APIs: the batch
  * [[DataSet]] API and the streaming [[DataStream]] API.
  *
  * Note: the class-level `@Test` annotation was removed — JUnit 4's
  * `org.junit.Test` targets methods only; test discovery is driven by
  * the per-method annotations below.
  */
class FlinkTesterChapter2 extends Assert {
  // Shared execution environments: one for batch jobs, one for streaming jobs.
  val benv = ExecutionEnvironment.getExecutionEnvironment
  val senv = StreamExecutionEnvironment.getExecutionEnvironment

  /**
    * Word count using the batch DataSet API.
    *
    * Batch `print()` triggers execution itself, so no explicit
    * `benv.execute(...)` call is needed here.
    */
  @Test
  def test1(): Unit = {
    val textBatch: DataSet[String] = benv.fromElements("123 4-5 To be or not to be,--that is the question:--", "Whether'tis nobler in the mind to suffer")
    // \W+ matches runs of non-word characters (spaces, punctuation, dashes),
    // so tokens are lower-cased words and digit runs.
    val count = textBatch.flatMap(_.toLowerCase.split("\\W+"))
                         .map((_, 1))
                         .groupBy(0) // group by the word (tuple field 0)
                         .sum(1)     // sum the counts (tuple field 1)
    count.print()
  }

  /**
    * Word count using the streaming DataStream API.
    *
    * Unlike the batch case, a streaming job only runs once
    * `senv.execute(...)` is called.
    */
  @Test
  def test2(): Unit = {
    val textStreaming: DataStream[String] = senv.fromElements("123 4-5 To be or not to be,--that is the question:--", " Whether'tis nobler in the mind to suffer")
    val count = textStreaming.flatMap { _.toLowerCase.split("\\W+") }
                          .map((_, 1))
                          .keyBy(0) // partition the stream by the word (tuple field 0)
                          .sum(1)   // running sum of counts (tuple field 1)
    count.print()

    senv.execute("Streaming Wordcount")
  }
}
