package net.bwie.flink;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Daily exam 2: builds a Flink streaming job that reads goods records from
 * in-memory elements and writes them to the Kafka topic {@code goods_topic}.
 *
 * @author xuanyu
 * @date 2025/10/16
 */
public class FlinkDemo01 {

	public static void main(String[] args) throws Exception {
		// 2. Create the Flink streaming environment and set the parallelism. (10 pts)
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);

		// 3. Custom source reading from elements; each record is
		//    (goodsId,category,name,amount,orderCount). (10 pts)
		// FIX: the requirement asks for an element-based source, but the original
		// code read from a socket (node101:9999) and left the sample data unused.
		DataStreamSource<String> stream = env.fromElements(
			"1,玩具,玩具手枪,28888.2,12",
			"2,玩具,遥控汽车,3222.0,5",
			"3,玩具,洋娃娃,68.0,8",
			"4,玩具,遥控飞机,3222.0,6",
			"5,电器,风扇,55.8,10",
			"6,电器,空调,99999.2,3",
			"7,电器,平底锅,666.0,1",
			"8,电器,平底锅,666.0,0"
		);

		// 4. Write the data to the Kafka topic goods_topic. (10 pts)
		KafkaSink<String> sink = KafkaSink.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setRecordSerializer(KafkaRecordSerializationSchema.builder()
				.setTopic("goods_topic")
				.setValueSerializationSchema(new SimpleStringSchema())
				.build()
			)
			// AT_LEAST_ONCE: on failure/retry records may be duplicated, never lost.
			.setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
			.build();
		stream.sinkTo(sink);

		// Trigger lazy job execution; nothing runs until execute() is called.
		env.execute("FlinkDemo01");
	}
}
