package com.atguigu.flink.chapter05.source;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

/**
 * @Author lzc
 * @Date 2022/7/4 11:23
 */
public class Flink04_Source_Custom {
    /**
     * Demo driver: attaches the custom socket source to a print sink
     * and runs the job with parallelism 1.
     */
    public static void main(String[] args) {
        // Pin the local web UI / REST port to 2000 for repeatable local runs.
        Configuration configuration = new Configuration();
        configuration.setInteger("rest.port", 2000);

        StreamExecutionEnvironment environment =
                StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        environment.setParallelism(1);

        environment
                .addSource(new MySocketSource())
                .print();

        try {
            environment.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}


/**
 * Custom Flink source that connects to {@code hadoop162:9999} and emits one
 * record per text line read from the socket (UTF-8).
 *
 * <p>Fixes over the original version:
 * <ul>
 *   <li>{@link #cancel()} was empty, so the source could never be stopped;
 *       it now flips a volatile flag and closes the socket to unblock a
 *       blocking {@code readLine()}.</li>
 *   <li>The socket and reader were never closed (resource leak); they are
 *       now managed by try-with-resources.</li>
 * </ul>
 */
class MySocketSource implements SourceFunction<String> {

    // volatile: cancel() is invoked from a different thread than run(),
    // so the write must be visible across threads.
    private volatile boolean running = true;

    // Kept so cancel() can close the connection and interrupt a readLine()
    // that is blocked waiting for data.
    private volatile Socket socket;

    /**
     * Reads lines from the socket until the stream ends or the source is
     * cancelled, forwarding each line via {@code ctx.collect}.
     *
     * @param ctx Flink-provided context used to emit records downstream
     * @throws Exception on connection or read failure (Flink restarts/fails the task)
     */
    @Override
    public void run(SourceContext<String> ctx) throws Exception {
        try (Socket s = new Socket("hadoop162", 9999);
             BufferedReader reader = new BufferedReader(
                     new InputStreamReader(s.getInputStream(), StandardCharsets.UTF_8))) {
            this.socket = s;
            String line;
            // Stop on cancellation or end-of-stream, whichever comes first.
            while (running && (line = reader.readLine()) != null) {
                ctx.collect(line);
            }
        }
    }

    /**
     * Called by the Flink runtime (from another thread) to stop this source.
     * Closing the socket unblocks a readLine() that is waiting for data.
     */
    @Override
    public void cancel() {
        running = false;
        Socket s = socket;
        if (s != null) {
            try {
                s.close();
            } catch (Exception ignored) {
                // Best-effort close during shutdown; nothing useful to do here.
            }
        }
    }
}