package cn.itcast.task;

import cn.itcast.bean.CleanBean;
import cn.itcast.config.QuotConfig;
import cn.itcast.function.IndexMinWindowFunction;
import cn.itcast.function.KeyFunction;
import cn.itcast.function.StockMinWindowFunction;
import cn.itcast.inter.ProcessDataInterface;
import cn.itcast.map.IndexPutHdfsMap;
import cn.itcast.map.StockPutHdfsMap;
import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.connector.file.sink.FileSink;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.functions.sink.filesystem.OutputFileConfig;
import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.DateTimeBucketAssigner;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

import java.util.concurrent.TimeUnit;

/**
 * 指数分时数据备份
 */
/**
 * Backs up per-minute index (指数) data to HDFS.
 *
 * <p>Pipeline: key the watermarked stream, aggregate it in 5-second
 * event-time tumbling windows, convert each window result to a text
 * line, and write the lines to HDFS via a rolling {@link FileSink}.
 */
public class IndexMinHdfsTask implements ProcessDataInterface {
    @Override
    public void process(DataStream<CleanBean> waterData) {

        // Run the HDFS client as "root" so the sink has write permission.
        System.setProperty("HADOOP_USER_NAME", "root");

        // 1. Output file naming: files look like index-<subtask>-<counter>.txt
        OutputFileConfig fileConfig = new OutputFileConfig("index", ".txt");

        // 2. Build the HDFS file sink.
        //    Rolling policy: roll the in-progress part file when any of these hits:
        //      - 10 s since the file was created
        //      - 1 min with no new records (inactivity)
        //      - part file reaches 1 GiB
        FileSink<String> sink = FileSink
                .forRowFormat(new Path(QuotConfig.config.getProperty("index.sec.hdfs.path")), new SimpleStringEncoder<String>("UTF-8"))
                .withRollingPolicy(
                        DefaultRollingPolicy.builder()
                                // BUGFIX: withRolloverInterval expects milliseconds;
                                // TimeUnit.SECONDS.toSeconds(10) evaluated to 10 ms,
                                // rolling a new tiny file every 10 milliseconds.
                                .withRolloverInterval(TimeUnit.SECONDS.toMillis(10))
                                .withInactivityInterval(TimeUnit.MINUTES.toMillis(1))
                                .withMaxPartSize(1024 * 1024 * 1024)
                                .build())
                .withOutputFileConfig(fileConfig)
                // Bucket by processing time using the assigner's default
                // "yyyy-MM-dd--HH" directory format.
                .withBucketAssigner(new DateTimeBucketAssigner<>())
                .build();

        // 3. Key the stream (grouping by the security code — see KeyFunction)
        waterData.keyBy(new KeyFunction())
                // 4. 5-second event-time tumbling windows
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                // 5. Per-window aggregation into index minute records
                .apply(new IndexMinWindowFunction())
                // 6. Serialize each record to its HDFS text-line representation
                .map(new IndexPutHdfsMap())
                // 7. Write to HDFS
                .sinkTo(sink);

    }
}
