from pyflink.common import SimpleStringSchema, WatermarkStrategy, Time, Types
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.datastream.connectors import DeliveryGuarantee
from pyflink.datastream.connectors.kafka import KafkaSource, KafkaOffsetsInitializer, KafkaSink, \
    KafkaRecordSerializationSchema
import json

from pyflink.datastream.window import SlidingProcessingTimeWindows

# --- 1. Flink execution environment ---------------------------------------
env = StreamExecutionEnvironment.get_execution_environment()

# Parallelism can be pinned for local debugging:
# env.set_parallelism(1)

# --- 2. Source: consume raw car events (JSON strings) from topic "cars" ---
source = (
    KafkaSource.builder()
    .set_bootstrap_servers("master:9092")
    .set_topics("cars")
    .set_group_id("my-group")
    .set_starting_offsets(KafkaOffsetsInitializer.latest())
    .set_value_only_deserializer(SimpleStringSchema())
    .build()
)

cars_ds = env.from_source(source, WatermarkStrategy.no_watermarks(), "Kafka Source")

# Count the traffic volume per checkpoint ("card"): every 5 seconds,
# report the count over the most recent 15 seconds (sliding window).

def _to_card_count(line):
    """Parse one JSON event and emit a (card, 1) pair for counting."""
    return json.loads(line)["card"], 1


def _sum_counts(left, right):
    """Merge two (card, count) pairs belonging to the same key."""
    return left[0], left[1] + right[1]


def _format_row(pair):
    """Render a (card, count) pair as a 'card,count' string for the sink."""
    return f"{pair[0]},{pair[1]}"


# 1. parse raw JSON into keyed count pairs
card_ds = cars_ds.map(_to_card_count)
# 2. partition the stream by checkpoint id
key_by_ds = card_ds.key_by(lambda pair: pair[0])
# 3. sliding processing-time window: 15s size, 5s slide
window_ds = key_by_ds.window(SlidingProcessingTimeWindows.of(Time.seconds(15), Time.seconds(5)))
# 4. sum the per-key counts inside each window
flow_ds = window_ds.reduce(_sum_counts)
# 5. format as a plain string so the Kafka sink can serialize it
result_ds = flow_ds.map(_format_row, output_type=Types.STRING())

# --- 6. Sink: publish the "card,count" results to Kafka topic "card_flow" ---
record_serializer = (
    KafkaRecordSerializationSchema.builder()
    .set_topic("card_flow")
    .set_value_serialization_schema(SimpleStringSchema())
    .build()
)

sink = (
    KafkaSink.builder()
    .set_bootstrap_servers("master:9092")
    .set_record_serializer(record_serializer)
    .set_delivery_guarantee(DeliveryGuarantee.AT_LEAST_ONCE)
    .build()
)

result_ds.sink_to(sink)

# Submit the job to the cluster (blocks until the streaming job terminates).
env.execute()
