/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.example;

import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.io.FilePathFilter;
import org.apache.flink.api.common.serialization.BulkWriter;
import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.CheckpointingOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.StateBackendOptions;
import org.apache.flink.connector.file.sink.FileSink;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.connector.file.src.FileSourceSplit;
import org.apache.flink.connector.file.src.enumerate.DynamicFileEnumerator;
import org.apache.flink.connector.file.src.enumerate.FileEnumerator;
import org.apache.flink.connector.file.src.reader.TextLineInputFormat;
import org.apache.flink.core.fs.FSDataOutputStream;
import org.apache.flink.core.fs.Path;
import org.apache.flink.shaded.guava32.com.google.common.hash.BloomFilter;
import org.apache.flink.shaded.guava32.com.google.common.hash.Funnels;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.sink.filesystem.OutputFileConfig;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.apache.flink.util.Collector;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.*;

/**
 * Skeleton for a Flink DataStream Job.
 *
 * <p>For a tutorial how to write a Flink application, check the
 * tutorials and examples on the <a href="https://flink.apache.org">Flink Website</a>.
 *
 * <p>To package your application into a JAR file for execution, run
 * 'mvn clean package' on the command line.
 *
 * <p>If you change the name of the main class (with the public static void main(String[] args))
 * method, change the respective entry in the POM.xml file (simply search for 'mainClass').
 */
public class DataStreamJob {

	/**
	 * Job entry point: reads "@"-delimited device log lines from S3, keeps the
	 * newest record per device id, and writes the deduplicated lines back to S3.
	 *
	 * @param args command-line arguments (unused)
	 * @throws Exception if the Flink job fails to build or execute
	 */
	public static void main(String[] args) throws Exception {
		// Set up the execution environment; BATCH mode since the input is a finite set of files
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		 env.setRuntimeMode(RuntimeExecutionMode.BATCH);

		// Checkpointing (every 5 minutes, exactly-once) — currently disabled
//		env.enableCheckpointing(300000);
		Configuration config = new Configuration();

		String stateBackendPath = "s3://aws-origin-data-bucket-20250112/state";
//		config.set(CheckpointingOptions.CHECKPOINT_STORAGE, "filesystem");
//		config.set(CheckpointingOptions.CHECKPOINTS_DIRECTORY, stateBackendPath);
//		config.set(StateBackendOptions.STATE_BACKEND, "rocksdb");

//		env.setParallelism(2);
		// Input paths (generated dynamically from the date; currently yesterday only)
//        String inputPath = "s3://aws-origin-data-bucket-20250112/test/" + getCurrentDatePath();
//		String inputPath = "s3://aws-origin-data-bucket-20250112/test/2025-05-03/usa/android/2025-05-03-00.android.usa.us/hygeia-4fe824d0-8dea-404b-a5ec-665d2680674f-1.log.gz";
//		String inputPath = "s3://aws-origin-data-bucket-20250112/test/2025-05-03/usa/android/2025-05-03-00.android.usa.us";

		Path[] paths = buildInputPaths(1);

		// Output path (generated dynamically from the current date)
		String outputPath = "s3://aws-origin-data-bucket-20250112/distinct/" + getCurrentDatePath();

		// Read the input files under the S3 paths as text lines (the paths hold .gz logs)
		FileSource<String> fileSource = FileSource.forRecordStreamFormat(
				new TextLineInputFormat(), paths
		).build();

		DataStreamSource<String> inputData = env.fromSource(
				fileSource,
				WatermarkStrategy.noWatermarks(),
				"s3-input"
		);

		// Deduplicate: key by device id, keep the record carrying the newest timestamp
		DataStream<DeviceInfo> deduplicatedStream = inputData
				// Parse each raw line into a DeviceInfo object
				.map(new ParseLogLineMapper())
				// Drop lines that failed to parse (null) or have an invalid device id
				.filter(new ValidDeviceInfoFilter())
				// Group by device id
				.keyBy(DeviceInfo::getDeviceId)
				// On equal timestamps the earlier record wins (prev is returned)
				.reduce((prev, curr) -> {
					if (curr.getTimestamp() > prev.getTimestamp()) {
						return curr;
					} else {
						return prev;
					}
				})
		;

		// Configure the output part-file naming
		OutputFileConfig outputFileConfig = OutputFileConfig
				.builder()
				.withPartPrefix("device_id_distinct")
				.withPartSuffix(".log")
				.build();

		// Write results to S3. NOTE(review): an earlier comment claimed "a new file per
		// 1M records", but the policy actually rolls by time (1 min) and size (256 MiB).
		final FileSink<String> sink = FileSink
				.forRowFormat(new Path(outputPath), new SimpleStringEncoder<String>("UTF-8"))
				.withRollingPolicy(
						DefaultRollingPolicy.builder()
								.withRolloverInterval(Duration.ofMinutes(1))
								.withInactivityInterval(Duration.ofMinutes(1))
								.withMaxPartSize(MemorySize.ofMebiBytes(256))
								.build())
				.withOutputFileConfig(outputFileConfig)
				.build();

		// Serialize each deduplicated record back to the "@"-joined line format and sink it
		deduplicatedStream.map(new MapFunction<DeviceInfo, String>() {
					@Override
					public String map(DeviceInfo deviceInfo) throws Exception {
						// Timestamp is scaled by 1000 — presumably seconds -> milliseconds; TODO confirm units
						String join = String.join("@", Arrays.asList(
								deviceInfo.getGeo(), deviceInfo.getOs(), deviceInfo.getPublisher(),
								deviceInfo.getDeviceId(), deviceInfo.getBrand(), deviceInfo.getUserAgent(),
								deviceInfo.getIp(), deviceInfo.getLanguage(), String.valueOf(deviceInfo.getTimestamp() * 1000),
								deviceInfo.getOsVersion(), deviceInfo.getBundle(), deviceInfo.getModel(), deviceInfo.getConnectionType()));
						return join;
					}
				})
//				.print();
                .sinkTo(sink);

		// Run the job
		env.execute("Device ID Deduplication Job");
	}

	/**
	 * Builds one S3 input path per past day, starting from yesterday and going
	 * back {@code days} days (e.g. {@code days == 1} yields only yesterday).
	 *
	 * <p>Uses {@code java.time} instead of the legacy {@code java.util.Date} +
	 * commons-lang {@code DateUtils}/{@code DateFormatUtils} combination; the
	 * file already uses {@code java.time.Duration}.
	 *
	 * @param days number of past days to include (non-positive yields an empty array)
	 * @return one {@link Path} per day, e.g. {@code .../test/2025-05-03/usa/android}
	 */
	private static Path[] buildInputPaths(int days) {
		LocalDate today = LocalDate.now();
		List<Path> paths = new ArrayList<>(Math.max(days, 0));
		for (int i = 1; i <= days; i++) {
			// LocalDate.toString() renders ISO-8601 "yyyy-MM-dd", matching the old format string.
			String day = today.minusDays(i).toString();
			paths.add(new Path("s3://aws-origin-data-bucket-20250112/test/" + day + "/usa/android"));
		}
		return paths.toArray(new Path[0]);
	}

	/**
	 * Parses a single "@"-delimited log line into a {@link DeviceInfo}.
	 *
	 * <p>Malformed lines (wrong field count, unparseable timestamp) map to
	 * {@code null}; {@link ValidDeviceInfoFilter} drops those downstream.
	 */
	public static class ParseLogLineMapper implements MapFunction<String, DeviceInfo> {
		@Override
		public DeviceInfo map(String logLine) {
			try {
				// FIX: use limit -1 so trailing empty fields are kept. String.split(regex)
				// silently discards trailing empty strings, so a line whose last field is
				// empty ("...@") came out as 12 tokens and was dropped — even though this
				// job's own sink emits exactly such lines when the last field is empty.
				String[] fields = logLine.split("@", -1);
				if (fields.length != 13) {
					return null; // wrong field count -> treat line as malformed
				}

				// Field order mirrors the join order used when writing output:
				// geo, os, publisher, deviceId, brand, userAgent, ip, language,
				// timestamp, osVersion, bundle, model, connectionType —
				// TODO confirm against the DeviceInfo constructor.
				return new DeviceInfo(
						fields[0],
						fields[1],
						fields[2],
						fields[3],
						fields[4],
						fields[5],
						fields[6],
						fields[7],
						Long.parseLong(fields[8]),  // timestamp; NumberFormatException -> null below
						fields[9],
						fields[10],
						fields[11],
						fields[12]
				);
			} catch (Exception e) {
				// Best-effort parsing: any failure yields null rather than failing the job.
				return null;
			}
		}
	}

	/**
	 * Window function that emits, per key and window, the single element carrying
	 * the largest timestamp. On ties the element seen first is kept (strict {@code >}).
	 */
	public static class DeviceIdProcessWindowFunction extends ProcessWindowFunction<DeviceInfo, DeviceInfo, String, GlobalWindow> {
		@Override
		public void process(String key, ProcessWindowFunction<DeviceInfo, DeviceInfo, String, GlobalWindow>.Context context, Iterable<DeviceInfo> elements, Collector<DeviceInfo> out) throws Exception {
			// Linear scan for the element with the maximum timestamp.
			DeviceInfo newest = null;
			for (DeviceInfo candidate : elements) {
				boolean firstOrNewer = newest == null || candidate.getTimestamp() > newest.getTimestamp();
				if (firstOrNewer) {
					newest = candidate;
				}
			}
			out.collect(newest);
		}
	}
	/**
	 * Filter that keeps only records which parsed successfully (non-null) and
	 * carry a 36-character device id (the canonical UUID string length —
	 * TODO confirm ids are UUIDs).
	 */
	public static class ValidDeviceInfoFilter implements FilterFunction<DeviceInfo> {
		@Override
		public boolean filter(DeviceInfo candidate) {
			// ParseLogLineMapper emits null for malformed lines; reject those first.
			if (candidate == null) {
				return false;
			}
			String id = candidate.getDeviceId();
			return id != null && id.length() == 36;
		}
	}

	/**
	 * KeyedProcessFunction that keeps, per device id, only the record with the
	 * newest timestamp, using a bloom filter as a cheap first-pass membership
	 * check in front of the keyed {@link ValueState}.
	 *
	 * <p>The bloom filter is {@code transient} and rebuilt empty in {@link #open},
	 * so it is per-subtask, shared across keys, and NOT part of checkpointed
	 * state — keyed state remains the source of truth.
	 */
	public static class DeviceIdDeduplicateWithBloomFilter extends KeyedProcessFunction<String, DeviceInfo, DeviceInfo> {
		// In-memory only; empty after every (re)start. Never trust it alone for "seen before".
		private transient BloomFilter<String> bloomFilter;
		// Newest record observed for the current key (keyed, checkpointable state).
		private ValueState<DeviceInfo> deviceInfoState;


		@Override
		public void open(OpenContext openContext) throws Exception {
			super.open(openContext);
			System.out.println("创建布隆过滤器");
			// 10k expected insertions at 1% false-positive rate — sized for testing;
			// raise the capacity for production volumes.
			bloomFilter = BloomFilter.create(Funnels.stringFunnel(StandardCharsets.UTF_8), 10000, 0.01);
			ValueStateDescriptor<DeviceInfo> descriptor = new ValueStateDescriptor<>("deviceInfo", DeviceInfo.class);
			deviceInfoState = getRuntimeContext().getState(descriptor);
		}

		@Override
		public void processElement(DeviceInfo value, KeyedProcessFunction<String, DeviceInfo, DeviceInfo>.Context ctx, Collector<DeviceInfo> out) throws Exception {
			if (bloomFilter.mightContain(value.getDeviceId())) {
				DeviceInfo storedDeviceInfo = deviceInfoState.value();
				System.out.println("布隆过滤器判定存在");
				if (storedDeviceInfo == null || value.getTimestamp() > storedDeviceInfo.getTimestamp()) {
					// storedDeviceInfo == null here means the bloom filter gave a false positive.
					if (storedDeviceInfo == null) {
						System.out.println("状态判定不存在");
					}
					System.out.println("更新" + value.getDeviceId() + "时间戳为" + value.getTimestamp());
					deviceInfoState.update(value);
					out.collect(value);
				} else {
					// FIX: log the stored timestamp; this previously printed
					// storedDeviceInfo.getDeviceId() after "时间戳为" ("timestamp is").
					System.out.println("已存在" + value.getDeviceId() + "时间戳为" + storedDeviceInfo.getTimestamp() + "当前数据时间戳" + value.getTimestamp());
				}
			} else {
				System.out.println("布隆过滤器判定不存在");
				bloomFilter.put(value.getDeviceId());
				// FIX: consult keyed state even on a bloom miss. The filter is transient,
				// so after a restore it is empty while state may already hold a newer
				// record; the old code unconditionally overwrote state and emitted here,
				// letting a stale record clobber a newer one.
				DeviceInfo storedDeviceInfo = deviceInfoState.value();
				if (storedDeviceInfo == null || value.getTimestamp() > storedDeviceInfo.getTimestamp()) {
					deviceInfoState.update(value);
					out.collect(value);
				}
			}
		}
	}

	/**
	 * Returns the current date formatted as {@code yyyyMMdd}, used as the
	 * date partition segment of the output path.
	 *
	 * @return today's date in basic ISO form, e.g. {@code 20250503}
	 */
	private static String getCurrentDatePath() {
		// BASIC_ISO_DATE renders a LocalDate as "yyyyMMdd" — replaces the legacy
		// java.util.Date + commons-lang DateFormatUtils pair with java.time.
		return LocalDate.now().format(DateTimeFormatter.BASIC_ISO_DATE);
	}
}
