from pyflink.common import SimpleStringSchema, WatermarkStrategy, Types
from pyflink.datastream import StreamExecutionEnvironment, CheckpointingMode, ExternalizedCheckpointCleanup, \
    HashMapStateBackend, EmbeddedRocksDBStateBackend, KeyedProcessFunction, RuntimeContext
from pyflink.datastream.connectors.kafka import KafkaSource, KafkaOffsetsInitializer
from pyflink.datastream.state import ValueStateDescriptor

# 1. Create the Flink stream execution environment.
env = StreamExecutionEnvironment.get_execution_environment()

# Trigger a checkpoint every 20 000 ms (20 s).
env.enable_checkpointing(20000)

# --- Advanced checkpoint options ---
checkpoint_config = env.get_checkpoint_config()

# Exactly-once processing semantics (this is the default mode).
checkpoint_config.set_checkpointing_mode(CheckpointingMode.EXACTLY_ONCE)

# Leave at least 500 ms between the end of one checkpoint and the start of the next.
checkpoint_config.set_min_pause_between_checkpoints(500)

# A checkpoint that takes longer than one minute is discarded.
checkpoint_config.set_checkpoint_timeout(60000)

# Tolerate up to two consecutive checkpoint failures before failing the job.
checkpoint_config.set_tolerable_checkpoint_failure_number(2)

# Allow only one checkpoint to be in flight at a time.
checkpoint_config.set_max_concurrent_checkpoints(1)

# Use externalized checkpoints so they are retained after the job is cancelled.
checkpoint_config.enable_externalized_checkpoints(
    ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)

# Choose where operator state lives:
# - HashMapStateBackend: state is kept in TaskManager memory and persisted
#   to the checkpoint storage (e.g. HDFS) on each checkpoint.
# - EmbeddedRocksDBStateBackend: state is kept on the TaskManager's local disk
#   and persisted to the checkpoint storage on each checkpoint.
env.set_state_backend(HashMapStateBackend())
# env.set_state_backend(EmbeddedRocksDBStateBackend())

# Location where checkpoint snapshots are stored.
checkpoint_config.set_checkpoint_storage_dir("hdfs://master:9000/flink/checkpoints")

# Kafka source: read string records from the "checkpoint" topic, starting
# from the earliest available offsets.
kafka_source = KafkaSource.builder() \
    .set_bootstrap_servers("master:9092") \
    .set_topics("checkpoint") \
    .set_group_id("my-group") \
    .set_starting_offsets(KafkaOffsetsInitializer.earliest()) \
    .set_value_only_deserializer(SimpleStringSchema()) \
    .build()

# Build the stream in one fluent chain: ingest from Kafka, split each record
# on commas into individual words, then partition by the word itself.
key_by_ds = env \
    .from_source(kafka_source, WatermarkStrategy.no_watermarks(), "Kafka Source") \
    .flat_map(lambda record: record.split(","), output_type=Types.STRING()) \
    .key_by(lambda word: word, key_type=Types.STRING())

class CountProcessFuntion(KeyedProcessFunction):
    """Keyed process function that maintains a running per-key element count.

    For every incoming element it increments a ValueState counter scoped to
    the current key and emits a ``(value, count)`` pair.
    """

    def __init__(self):
        # Handle to the keyed ValueState; assigned in open().
        self.count_state = None

    def open(self, runtime_context: RuntimeContext):
        # Register an INT-typed value state named 'count' for the current key.
        descriptor = ValueStateDescriptor('count', Types.INT())
        self.count_state = runtime_context.get_state(descriptor)

    def process_element(self, value, ctx: 'KeyedProcessFunction.Context'):
        # State is None the first time a key is seen; start counting at 1.
        previous = self.count_state.value()
        current = 1 if previous is None else previous + 1
        self.count_state.update(current)
        yield value, current

# Apply the stateful counting function to the keyed stream.
result_ds = key_by_ds.process(CountProcessFuntion())

# Print each (word, running-count) pair to stdout for inspection.
result_ds.print()

# Submit the job graph to the cluster and block until completion.
env.execute()
