package com.exd.flink.stream;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.exd.flink.common.OutputVdataTagBase;
import com.exd.flink.source.StreamSourceInter;
import com.exd.flink.stream.flow.BdcmVdataStream;
import com.exd.flink.stream.flow.VcomputeStream;
import com.exd.flink.stream.flow.VdcmVdataStream;
import com.exd.flink.stream.function.ShuntStreamFunction;

/**
 * Description: Kafka consumption entry class.
 * Author: Zhao Xingyan
 * Date: 2022-09-13
 */
public class StreamStartBean {

	/** Number of records collected per count window before the shunt function fires. */
	private static final int SHUNT_WINDOW_SIZE = 10000;

	/**
	 * Kafka consumption entry point: reads raw records from Kafka, batches them
	 * in fixed-size count windows, splits each batch into typed side outputs,
	 * and hands every side output to its dedicated parsing stream.
	 *
	 * @param args command-line arguments (unused)
	 * @throws Exception if the Flink job fails to build or execute
	 */
	public static void main(String[] args) throws Exception {
		// Create the streaming execution environment.
		StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
		// Integrate Kafka as the data source.
		// NOTE: for local debugging, StreamSourceInter.localFileSource(...) can be substituted here.
		DataStreamSource<String> stream = StreamSourceInter.kafkaSource(streamExecutionEnvironment);
		// Batch a fixed number of records per window to keep consumption steady,
		// then split the batch into per-type side outputs.
		SingleOutputStreamOperator<String> windowFunction =
				stream.countWindowAll(SHUNT_WINDOW_SIZE).process(new ShuntStreamFunction());
		// Route each vdata side output to its corresponding parser.
		BdcmVdataStream.analyticalData(windowFunction.getSideOutput(OutputVdataTagBase.BDCM_VDATA));
		VdcmVdataStream.analyticalData(windowFunction.getSideOutput(OutputVdataTagBase.VDCM_VDATA));
		// Parse vcompute data from both sources together.
		DataStream<String> bdcmComputeData = windowFunction.getSideOutput(OutputVdataTagBase.BDCM_VCOMPUTE);
		DataStream<String> vdcmComputeData = windowFunction.getSideOutput(OutputVdataTagBase.VDCM_VCOMPUTE);
		VcomputeStream.analyticalData(bdcmComputeData, vdcmComputeData);
		// Name the job so it is identifiable in the Flink dashboard.
		streamExecutionEnvironment.execute("StreamStartBean");
	}

}
