package day03;

import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Date;

import org.apache.flink.api.common.functions.util.PrintSinkOutputWriter;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsSink {

    // HDFS sink demo: records are written under a per-day directory
    // (<url>/yyyy/MM/dd/) with one file per parallel subtask. Keep the
    // partition depth shallow (3 levels here) — too many partitions put
    // excessive pressure on the NameNode.
    public static class HdfsSinkTest extends RichSinkFunction<Tuple2<String, String>> {
        // Base HDFS directory under which the dated sub-directories are created.
        private String url;
        // Initialized in open(); null until then (and left null on the client,
        // since only the serialized sink instance ships to the task managers).
        private FileSystem fs;
        // Thread-safe replacement for SimpleDateFormat; formats the yyyy/MM/dd
        // partition path. Assigned in open(), so it is never serialized.
        private DateTimeFormatter df;
        // Index of this parallel subtask; gives each subtask its own output file.
        private int current_thread;
        private static final String prefix = "flink_hdfs_sink_";

        /**
         * @param url base HDFS directory for all output, e.g. "/user/xxx/flink/"
         */
        public HdfsSinkTest(String url) {
            this.url = url;
        }

        @Override
        public void close() throws Exception {
            // open() may have failed before fs was assigned — guard against NPE.
            if (fs != null) {
                fs.close();
            }
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            current_thread = getRuntimeContext().getIndexOfThisSubtask();
            // Resolves the cluster from the default Hadoop configuration
            // (fs.defaultFS on the classpath); url itself is used per record.
            fs = FileSystem.get(new org.apache.hadoop.conf.Configuration());
            df = DateTimeFormatter.ofPattern("yyyy/MM/dd");
        }

        @Override
        public void invoke(Tuple2<String, String> value, Context context) throws Exception {
            // Called once per record. Appends to this subtask's dated file,
            // e.g. <url>/2024/01/31/flink_hdfs_sink_0.txt.
            String dateStr = LocalDate.now().format(df);
            String allPath = url + "/" + dateStr; // today's output directory
            Path path = new Path(allPath + "/" + prefix + current_thread + ".txt");
            // try-with-resources guarantees both streams are closed even when a
            // write fails, so no HDFS output streams (and their leases) leak.
            try (FSDataOutputStream os = fs.exists(path) ? fs.append(path) : fs.create(path);
                 PrintWriter printWriter = new PrintWriter(os)) {
                printWriter.println(value);
            }
        }

    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        env.addSource(new SourceFunction<Tuple2<String, String>>() {
            // volatile: cancel() runs on a different thread than run(), so the
            // write must be visible across threads or the loop may never stop.
            private volatile boolean flag = true;

            @Override
            public void run(SourceContext<Tuple2<String, String>> ctx) throws Exception {
                int i = 0;
                // Emit ("hainiu", "<counter>") every 500 ms until cancelled.
                while (flag) {
                    ctx.collect(Tuple2.of("hainiu", i + ""));
                    Thread.sleep(500);
                    i++;
                }
            }

            @Override
            public void cancel() {
                flag = false;
            }
        }).addSink(new HdfsSinkTest("/user/wangy33/flink/"));

        env.execute();
    }
}
