package com.shujia.process;

import com.shujia.flinkcore.StudentsWindow;
import javafx.scene.shape.HLineTo;
import org.apache.commons.compress.archivers.dump.DumpArchiveEntry;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.util.Arrays;
import java.util.List;

/**
 * Flink demo: reads comma-separated student records from a socket
 * ("hadoop102", port 8888), parses each line into a {@code Student}
 * via a {@link ProcessFunction}, and prints the resulting stream.
 *
 * <p>Expected line format: {@code id,name,age,gender,clazz}.
 */
public class ProcessDemo1 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> src = env.socketTextStream("hadoop102", 8888);

        src.map(line -> Arrays.asList(line.split(",")), Types.LIST(Types.STRING))
                .process(new ProcessFunction<List<String>, Student>() {
                    @Override
                    public void processElement(List<String> strings, ProcessFunction<List<String>, Student>.Context context, Collector<Student> collector) throws Exception {
                        // Guard against short/malformed lines: indexing past the end
                        // would throw IndexOutOfBoundsException and fail the whole job.
                        if (strings.size() < 5) {
                            return; // skip malformed record
                        }
                        String id = strings.get(0);
                        String name = strings.get(1);
                        int age;
                        try {
                            age = Integer.parseInt(strings.get(2).trim());
                        } catch (NumberFormatException ignored) {
                            // Non-numeric age (e.g. a typo on the socket input):
                            // drop the record instead of crashing the operator.
                            return;
                        }
                        String gender = strings.get(3);
                        String clazz = strings.get(4);
                        collector.collect(new Student(id, name, age, gender, clazz));
                    }
                }).print();
        // Trigger the streaming job; blocks until the job terminates.
        env.execute();
    }
}
