package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * 从Kafka消费数据，实时获取json数据，并解析和提取字段
 * @author xuanyu
 * @date 2025/10/16
 */
public class _06FlinkSourceKafkaDemo {

	public static void main(String[] args) throws Exception{
		// 1. Execution environment
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);

		// 2. Source: consume CDC (Debezium-style) JSON records from Kafka.
		KafkaSource<String> source = KafkaSource.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setTopics("mysql-topic")
			.setGroupId("my-group-1")
			.setStartingOffsets(OffsetsInitializer.earliest())
			.setValueOnlyDeserializer(new SimpleStringSchema())
			.build();
		DataStreamSource<String> stream = env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");

		// 3. Transformation: parse each CDC envelope and flatten selected fields.
		/*
			Expected input shape (Debezium change event):
				{
				    "before": null,
				    "after": {
				        "id": 5,
				        "name": "ergou",
				        "sex": "male"
				    },
				    "source": {
				        "version": "1.9.7.Final",
				        "connector": "mysql",
				        "name": "mysql_binlog_source",
				        "ts_ms": 0,
				        "snapshot": "false",
				        "db": "db_test",
				        "sequence": null,
				        "table": "tbl_users",
				        "server_id": 0,
				        "gtid": null,
				        "file": "",
				        "pos": 0,
				        "row": 0,
				        "thread": null,
				        "query": null
				    },
				    "op": "r",
				    "ts_ms": 1760597589139,
				    "transaction": null
				}
		 */
		SingleOutputStreamOperator<String> stream1 = stream.map(
			// NOTE: no open()/close() lifecycle is needed here; RichMapFunction is kept
			// only to preserve the original imports — MapFunction would suffice.
			new RichMapFunction<String, String>() {
				/**
				 * Flattens one CDC envelope into a compact JSON string with
				 * operate_type, operate_ts, db_name, table_name, operate_data.
				 *
				 * @param value raw Kafka record value (Debezium JSON envelope)
				 * @return flattened JSON string
				 */
				@Override
				public String map(String value) throws Exception {
					// Level 1: parse the whole envelope.
					JSONObject jsonObject = JSON.parseObject(value);
					// Level 2: the "source" metadata object (may be absent on
					// malformed records — guard to avoid killing the job with an NPE).
					JSONObject jsonObjectSource = jsonObject.getJSONObject("source");

					// Extract op (operation type) and ts_ms (event timestamp).
					String opValue = jsonObject.getString("op");
					Long tsMsValue = jsonObject.getLong("ts_ms");
					// Extract database and table names from the source metadata.
					String dbValue = null;
					String tableValue = null;
					if (null != jsonObjectSource) {
						dbValue = jsonObjectSource.getString("db");
						tableValue = jsonObjectSource.getString("table");
					}

					// Row data: prefer "after" (insert/update/read); fall back to
					// "before" (delete). Use getJSONObject — getString would serialize
					// the nested object and the output would carry a double-encoded,
					// escaped JSON string instead of a nested object.
					JSONObject data = jsonObject.getJSONObject("after");
					if(null == data){
						data = jsonObject.getJSONObject("before");
					}

					// Assemble the flattened output record.
					JSONObject object = new JSONObject() ;
					object.put("operate_type", opValue);
					object.put("operate_ts", tsMsValue);
					object.put("db_name", dbValue);
					object.put("table_name", tableValue);
					object.put("operate_data", data);

					return object.toJSONString();
				}
			}
		);

		// 4. Sink: print to stdout (demo only).
		stream1.print();

		// 5. Trigger execution.
		env.execute("FlinkSourceKafkaDemo");
	}

}
