package com.book.flink

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema


//import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.api.scala._

object BookMessageJob {

  /** Entry point: builds and runs a Flink streaming job that consumes
    * string records from the Kafka topic `msg_topic` and forwards each
    * record to an HTTP endpoint via [[HttpSinkFunction]].
    */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Checkpointing is currently disabled; re-enable the lines below
    // (plus the CheckpointingMode import) for exactly-once recovery.
//    env.enableCheckpointing(4000)
//    env.getCheckpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)
//    env.getCheckpointConfig.setMaxConcurrentCheckpoints(1)

    // Kafka consumer configuration.
    val topic = "msg_topic"
    val kafkaProps = new Properties()
    kafkaProps.setProperty("bootstrap.servers", "127.0.0.1:9092")
    // NOTE(review): group.id "mst_topic" looks like a typo of the topic name
    // "msg_topic" — confirm the intended consumer-group id before changing.
    kafkaProps.setProperty("group.id", "mst_topic")

    val messages =
      env.addSource(new FlinkKafkaConsumer[String](topic, new SimpleStringSchema(), kafkaProps))

    // Push every consumed record to the HTTP sink.
    // Uncomment the print below for local debugging.
    //messages.print()
    messages.addSink(new HttpSinkFunction())

    env.execute("BookMessageJob")
  }
}
