package day01;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

/**
 * Demo driver for a user-defined Flink source that monitors an HDFS file:
 * when the file changes, the source re-reads and emits the file's new content.
 * (Change detection is presumably based on the HDFS file checksum —
 * {@code FileSystem.getFileChecksum} returns e.g. MD5-of-0MD5-of-512CRC32C —
 * TODO confirm against the RichHdfsSourceFunction implementation.)
 *
 * <p>Flink source-function hierarchy, for reference:
 * <pre>
 *   SourceFunction           ParallelSourceFunction
 *   RichSourceFunction       RichParallelSourceFunction
 * </pre>
 * The "Rich" variants additionally provide {@code open}, {@code close}
 * and {@code getRuntimeContext}.
 */
public class TestUDSource {

    /**
     * Builds and runs a local streaming job that prints every record
     * emitted by the user-defined HDFS source.
     *
     * @param args unused
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Local environment with the Flink web UI enabled, handy for inspection.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // Attach the rich user-defined HDFS source and print each emitted record.
        env.addSource(new RichHdfsSourceFunction("/user/wangy33/a.txt")).print();

        // The calls above only build the dataflow plan; execute() submits and runs it.
        env.execute();
    }
}
