//package com.wudl.flink.stream.sink
//
//import java.util.Properties
//
//import com.wudl.flink.stream.source.StationLog
//import org.apache.flink.api.common.serialization.{SimpleStringEncoder, SimpleStringSchema}
//import org.apache.flink.core.fs.Path
//import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink
//import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy
//import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
//import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
//import org.apache.kafka.common.serialization.StringDeserializer
//
//object KafkaToHdfs {
//  def main(args: Array[String]): Unit = {
//
//    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
//    env.setParallelism(1)
//    import org.apache.flink.streaming.api.scala._
//    val props = new Properties()
//    props.setProperty("bootstrap.servers", "node01.com:6667")
//    props.setProperty("group.id", "fink01")
//    props.setProperty("key.deserializer", classOf[StringDeserializer].getName)
//    props.setProperty("value.deserializer", classOf[StringDeserializer].getName)
//    props.setProperty("auto.offset.reset", "latest")
//
//    val stream: DataStream[String] = env.addSource(new FlinkKafkaConsumer[String]("flink_topic", new SimpleStringSchema(), props))
//
//    val rolling: DefaultRollingPolicy[String, String] = DefaultRollingPolicy.create()
//      .withMaxPartSize(1024*1024*128) // maximum size of each part file: 128 MB (same as the default)
//      .withInactivityInterval(60*1000*10) // roll the in-progress part file after 10 minutes of inactivity
//      .withRolloverInterval(1000*60) // roll over to a new part file every 60 seconds
//      .build() // build the rolling policy
//
//
//    // create the HDFS sink (row-encoded StreamingFileSink)
//    val hdfsSink: StreamingFileSink[String] = StreamingFileSink.forRowFormat[String](
//      new Path("hdfs://node01.com:8020/MySink001/"),
//      new SimpleStringEncoder[String]("UTF-8"))
//      .withRollingPolicy(rolling)
//      .withBucketCheckInterval(1000) // check rolling conditions every 1000 ms
//      .build()
//
//
//    stream.addSink(hdfsSink)
//
//    env.execute("consumerFlink")
//
//
//  }
//
//}
