package EastMoney

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer
import util.RandDomData

import java.util.Properties

object StockCodeDeduplication {

  /** Flink streaming job: reads randomly generated stock records, keeps only
    * the FIRST record seen for each stock code (deduplication by code), and
    * emits the deduplicated records both to the Kafka topic "stock_codes"
    * and to stdout.
    */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Kafka producer configuration.
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "node1:9092")
    // NOTE(review): the taskmanager.memory.* keys below are Flink *cluster*
    // options, not Kafka client settings; the Kafka producer ignores unknown
    // keys, so they have no effect here. They belong in flink-conf.yaml (or
    // the Configuration handed to the environment) — confirm and relocate.
    properties.setProperty("taskmanager.memory.network.fraction", "0.2")
    properties.setProperty("taskmanager.memory.network.min", "128000")
    properties.setProperty("taskmanager.memory.network.max", "268435456")

    // Build the dedup pipeline ONCE and fan it out to both sinks.
    // The original code built two independent pipelines from two separate
    // RandDomData sources: that doubled the work and — because the source is
    // random — printed DIFFERENT records than it sent to Kafka.
    val deduped = env
      .addSource(new RandDomData)
      .map(data => ((data.code, data), 1))                       // ((code, record), count)
      .keyBy(_._1._1)                                            // partition by stock code
      .reduce((r1, r2) => ((r1._1._1, r1._1._2), r1._2 + r2._2)) // keep first record, sum counts
      .map(data => (data._2, data._1._2))                        // (count, record)
      .filter(_._1 <= 1)                                         // only the first occurrence passes
      .map(_._2.toString)

    deduped.addSink(
      new FlinkKafkaProducer[String]("stock_codes", new SimpleStringSchema(), properties))
    deduped.print()

    env.execute()
  }
}
