package com.sugon.ohdfs.integration.flink.job.stream.sink;

import com.sugon.ohdfs.integration.flink.domain.TestItem;
import com.sugon.ohdfs.integration.flink.util.SHA256Utils;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.core.fs.Path;
import org.apache.flink.formats.sequencefile.SequenceFileWriterFactory;
import org.apache.flink.runtime.util.HadoopUtils;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.springframework.beans.factory.annotation.Value;

import java.io.Serializable;

/**
 * Streaming sink job that writes {@link TestItem} records to HDFS as Hadoop
 * {@code SequenceFile}s via Flink's bulk-format {@link StreamingFileSink}.
 * Each record is stored as a {@code (key, value)} pair where the key is the
 * SHA-256 digest of the record's string form and the value is the string form
 * itself.
 */
public class BulkSequenceStreamSinkJob extends AbstractStreamSinkJob {

    /** Target HDFS directory under which the sink rolls sequence files. */
    @Value("${hdfs.base-path:hdfs://10.11.8.29:9001/flink/sinkTest/stream/sink/bulk/sequence}")
    String basePath;

    /**
     * Converts a {@link TestItem} into the {@code Tuple2<Text, Text>} expected
     * by the sequence-file writer: (SHA-256 digest of the item's string form,
     * the string form).
     *
     * @param item the record to transform; assumed non-null — TODO confirm the
     *             caller in {@code AbstractStreamSinkJob} guarantees this
     * @return a serializable {@code Tuple2<Text, Text>} of (digest, content)
     */
    @Override
    protected Serializable transform(TestItem item) {
        String content = item.toString();
        // Tuple2.of(...) replaces the original mutable new-then-setFields sequence.
        return Tuple2.of(new Text(SHA256Utils.getSHA256(content)), new Text(content));
    }

    /**
     * Builds a bulk-format sink writing {@code SequenceFile<Text, Text>} under
     * {@link #basePath}.
     * <p>
     * Fix: the original pinned the sink element type to {@code <TestItem>} while
     * the (raw-typed) {@link SequenceFileWriterFactory} actually writes the
     * {@code Tuple2<Text, Text>} pairs produced by {@link #transform}; the raw
     * factory masked that mismatch at compile time. The diamond on the factory
     * lets inference yield the correct element type. The raw
     * {@code StreamingFileSink} return type is kept to match the parent's
     * signature.
     *
     * @return the configured streaming file sink
     */
    @Override
    protected StreamingFileSink buildSink() {
        // Derive the Hadoop configuration from the Flink global configuration so
        // the sequence-file writer picks up the cluster's HDFS settings.
        Configuration hadoopConf = HadoopUtils.getHadoopConfiguration(GlobalConfiguration.loadConfiguration());
        return StreamingFileSink
                .forBulkFormat(new Path(basePath),
                        new SequenceFileWriterFactory<>(hadoopConf, Text.class, Text.class))
                .build();
    }
}
