package flink_p1

import org.apache.flink.api.common.functions.RichMapFunction
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment, createTypeInformation}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.kafka.clients.consumer.ConsumerConfig
import redis.clients.jedis.Jedis

import java.util.Properties

object FlinkTest_09_RichFunction {

  /**
   * Demo of a `RichMapFunction`: count traffic (occurrences per record) from a
   * Kafka topic and persist the counters in Redis.
   *
   * A plain `map` lambda is unsuitable here because it offers no lifecycle
   * hooks — we would have to open and close a Redis connection per record.
   * `RichMapFunction.open`/`close` run once per parallel subtask, so a single
   * Jedis connection can be reused for the subtask's whole lifetime.
   */
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Kafka consumer configuration (local broker, fixed consumer group).
    val prop = new Properties()
    prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092")
    prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "group1")

    // Explicit [String] element type: records are deserialized as plain UTF-8 strings.
    val source = new FlinkKafkaConsumer[String]("flink-test", new SimpleStringSchema(), prop)

    val kafkaSource: DataStream[String] = env.addSource(source)

    // Write the traffic counters to Redis.
    //
    // A bare `map(data => ...)` is the wrong tool: connecting to Redis, writing,
    // and closing the connection inside the lambda would happen once per record.
    //    val res1: DataStream[(String, Int)] = kafkaSource.map(data => {
    //      val carId: String = data.split("\t")(1)
    //      // connect to Redis
    //      // write data
    //      // close connection
    //    })

    kafkaSource.map(new RichMapFunction[String, Long] {

      // Connection is created in open() and released in close(); `_` (not null)
      // is the idiomatic placeholder for a field initialized later.
      var jedis: Jedis = _

      /** Called once per subtask before any record is processed. */
      override def open(parameters: Configuration): Unit = {
        // RuntimeContext gives access to task metadata — printed here for demo purposes.
        val taskName: String = getRuntimeContext.getTaskName
        val subtasks: String = getRuntimeContext.getTaskNameWithSubtasks
        println(s"taskName:$taskName")
        println(s"subtasks:$subtasks")

        jedis = new Jedis("127.0.0.1", 6379)
      }

      /** Called once per subtask on shutdown; releases the Redis connection. */
      override def close(): Unit = {
        if (jedis != null) {
          jedis.close()
        }
      }

      /**
       * Atomically increments the Redis counter keyed by the raw record and
       * returns the new count.
       *
       * NOTE(review): this keys on the entire Kafka record; the commented-out
       * sketch above suggests the intended key is the car id at
       * `in.split("\t")(1)` — confirm against the producer's message format.
       */
      override def map(in: String): Long = {
        jedis.incr(in)
      }
    }).print()

    env.execute("kafka1")
  }

}
