import time
from typing import Any

from pyflink.common import SimpleStringSchema, WatermarkStrategy, Time
from pyflink.common.watermark_strategy import TimestampAssigner
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.datastream.connectors.kafka import KafkaSource, KafkaOffsetsInitializer
from pyflink.datastream.window import TumblingEventTimeWindows



# --- Execution environment -------------------------------------------------
env = StreamExecutionEnvironment.get_execution_environment()
env.set_parallelism(1)

# --- Kafka source: value-only string records from the word/event-time topic
kafka_source = (
    KafkaSource.builder()
    .set_bootstrap_servers("master:9092")
    .set_topics("event_time_word_partition")
    .set_group_id("my-group")
    .set_starting_offsets(KafkaOffsetsInitializer.latest())
    .set_value_only_deserializer(SimpleStringSchema())
    .build()
)

# Watermarks are attached later (after parsing), so none are generated here.
lines_ds = env.from_source(kafka_source, WatermarkStrategy.no_watermarks(), "Kafka Source")

def map_fun(line):
    """Parse a raw ``word,YYYY-mm-dd HH:MM:SS`` record into ``(word, epoch_ms)``.

    Args:
        line: raw Kafka record value, e.g. ``"hello,2023-05-01 12:00:00"``.

    Returns:
        Tuple of (word, event timestamp in milliseconds since the epoch).
        The timestamp is interpreted in the local timezone via ``time.mktime``.

    Raises:
        IndexError: if the record has fewer than two comma-separated fields.
        ValueError: if the date field does not match the expected format.
    """
    fields = line.split(',')
    word, date = fields[0], fields[1]
    # strptime -> naive local struct_time; mktime -> epoch seconds; *1000 -> ms
    ts = int(time.mktime(time.strptime(date, "%Y-%m-%d %H:%M:%S")) * 1000)
    return word, ts

# Parse each raw Kafka line into a (word, epoch_ms) tuple.
ts_ds = lines_ds.map(map_fun)

class MyTimestampAssigner(TimestampAssigner):
    """Pull the event-time timestamp out of a (word, epoch_ms) record."""

    def extract_timestamp(self, value: Any, record_timestamp: int) -> int:
        # The second tuple element is the epoch-millisecond timestamp
        # produced by map_fun upstream; the broker record timestamp is ignored.
        event_ts = value[1]
        return event_ts


# Attach event-time timestamps; the monotonous strategy assumes timestamps
# never decrease within a partition.
watermark_strategy = (
    WatermarkStrategy.for_monotonous_timestamps()
    .with_timestamp_assigner(MyTimestampAssigner())
)
ass_ds = ts_ds.assign_timestamps_and_watermarks(watermark_strategy)

# Emit (word, 1) pairs, key by word, then count per 5-second tumbling
# event-time window with an incremental reduce.
kv_ds = ass_ds.map(lambda pair: (pair[0], 1))
kv_by_ds = kv_ds.key_by(lambda pair: pair[0])
window_ds = kv_by_ds.window(TumblingEventTimeWindows.of(Time.seconds(5)))
count_ds = window_ds.reduce(lambda left, right: (left[0], left[1] + right[1]))

count_ds.print()

env.execute()