"""Flink实时清洗逻辑（去重、格式标准化）

负责对实时数据流进行清洗，包括：
- 数据去重（基于ID或内容）
- 格式标准化（统一时间格式、字段命名等）
- 数据过滤（移除无效或不符合要求的数据）
- 基本校验（数据完整性检查）
"""
import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer
import org.apache.flink.streaming.api.functions.windowing.WindowFunction
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector
import org.apache.flink.api.java.tuple.Tuple
import org.apache.flink.api.java.utils.ParameterTool

import org.json4s._
import org.json4s.jackson.JsonMethods._
import java.text.SimpleDateFormat
import java.util.{Properties, Date, UUID}
import scala.util.Try
import scala.collection.JavaConverters._

/**
  * Record shape produced by the cleaning pipeline.
  *
  * Built by `JsonParserMapFunction` from raw JSON and normalized by
  * `DataStandardizerMapFunction` before being serialized back to Kafka.
  */
case class CleanedData(
  id: String,              // unique identifier (random UUID assigned when missing in input)
  content: String,         // text content (whitespace-normalized downstream)
  source: String,          // data source name (lower-cased downstream)
  publishTime: Long,       // publish timestamp, epoch millis
  author: Option[String],  // author, if present in the raw record
  url: Option[String],     // origin URL, if present in the raw record
  cleanedTime: Long,       // epoch millis at which this record was cleaned
  metadata: Map[String, String]  // free-form metadata carried through from input
)

/**
  * Flink streaming cleaner for finance data.
  *
  * Reads raw JSON records from Kafka, parses them into [[CleanedData]],
  * drops records that fail to parse, deduplicates by record id within a
  * 5-minute window, normalizes text and source fields, and writes the
  * cleaned records back to Kafka as JSON.
  */
object FlinkDataCleaner {
  // json4s implicit formats used by every extract/decompose call below.
  implicit val formats: DefaultFormats.type = DefaultFormats

  // Kept for source compatibility with any external callers.
  // WARNING: SimpleDateFormat is NOT thread-safe; internal code must use
  // `threadLocalDateFormat` instead of this shared instance.
  val dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

  // Bug fix: one SimpleDateFormat per thread. The original shared a single
  // mutable SimpleDateFormat across all parallel subtasks running in the same
  // TaskManager JVM, which can silently produce corrupted timestamps under
  // concurrent parsing.
  private val threadLocalDateFormat: ThreadLocal[SimpleDateFormat] =
    ThreadLocal.withInitial(() => new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"))

  /**
    * Job entry point. Wires Kafka source -> parse -> filter -> dedup ->
    * standardize -> serialize -> Kafka sink and submits the job.
    *
    * Recognized arguments (all optional, with defaults):
    * `--parallelism`, `--kafka.bootstrap.servers`, `--kafka.group.id`,
    * `--kafka.auto.offset.reset`, `--kafka.input.topic`, `--kafka.output.topic`.
    */
  def main(args: Array[String]): Unit = {
    // Load configuration parameters from the command line.
    val params = ParameterTool.fromArgs(args)
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Set job parallelism (default 4).
    env.setParallelism(params.getInt("parallelism", 4))

    // Configure the Kafka consumer.
    val consumerProps = new Properties()
    consumerProps.setProperty("bootstrap.servers", params.get("kafka.bootstrap.servers", "localhost:9092"))
    consumerProps.setProperty("group.id", params.get("kafka.group.id", "flink-cleaner-group"))
    consumerProps.setProperty("auto.offset.reset", params.get("kafka.auto.offset.reset", "earliest"))

    // Read raw records from Kafka.
    val rawStream: DataStream[String] = env.addSource(
      new FlinkKafkaConsumer[String](
        params.get("kafka.input.topic", "raw_finance_data"),
        new SimpleStringSchema(),
        consumerProps
      )
    )

    // Parse JSON records; failures are carried as Left(errorMessage).
    val parsedStream: DataStream[Either[String, CleanedData]] = rawStream.map(new JsonParserMapFunction())

    // Keep only successfully parsed records.
    // Fix: was `.filter(_.isRight).map(_.right.get)` — partial `.get` replaced
    // with a total flatMap over the right-biased Either.
    // NOTE(review): parse failures are dropped silently here; consider routing
    // the Left values to a side output / dead-letter topic for observability.
    val validStream: DataStream[CleanedData] = parsedStream.flatMap(_.toOption)

    // Deduplicate within a 5-minute window, keyed by record id.
    // Fix: type-safe `keyBy(_.id)` instead of the deprecated, stringly-typed
    // `keyBy("id")` (which also forced a raw Tuple key in the window function).
    // NOTE: duplicates that straddle a window boundary are not collapsed, and
    // output is delayed until the window fires — acceptable for this use case.
    val deduplicatedStream: DataStream[CleanedData] = validStream
      .keyBy(_.id)
      .timeWindow(Time.minutes(5))
      .apply(new DeduplicateWindowFunction())

    // Normalize content and source fields.
    val cleanedStream: DataStream[CleanedData] = deduplicatedStream.map(new DataStandardizerMapFunction())

    // Serialize back to JSON strings.
    val resultStream: DataStream[String] = cleanedStream.map(new JsonSerializerMapFunction())

    // Configure the Kafka producer.
    val producerProps = new Properties()
    producerProps.setProperty("bootstrap.servers", params.get("kafka.bootstrap.servers", "localhost:9092"))

    // Write cleaned records to the output topic.
    resultStream.addSink(
      new FlinkKafkaProducer[String](
        params.get("kafka.output.topic", "cleaned_finance_data"),
        new SimpleStringSchema(),
        producerProps
      )
    )

    // Submit the job.
    env.execute("Finance Data Cleaning Job")
  }

  /**
    * Parses one raw JSON string into a [[CleanedData]].
    *
    * Returns `Right(data)` on success or `Left(errorMessage)` when the input
    * is not valid JSON or a required field (`content`) is missing. Missing
    * `id` gets a random UUID; missing `source` defaults to "unknown";
    * `publishTime` accepts either epoch millis or a "yyyy-MM-dd HH:mm:ss"
    * string, falling back to the current time.
    */
  class JsonParserMapFunction extends MapFunction[String, Either[String, CleanedData]] {
    override def map(value: String): Either[String, CleanedData] = {
      Try {
        val json = parse(value)

        // Required/identifying fields.
        val id = (json \ "id").extractOpt[String].getOrElse(UUID.randomUUID().toString)
        val content = (json \ "content").extract[String] // throws -> Left when absent
        val source = (json \ "source").extractOpt[String].getOrElse("unknown")

        // Publish time: numeric epoch millis preferred, then a formatted
        // date string, then "now" as a last resort.
        val publishTime = (json \ "publishTime").extractOpt[Long].getOrElse {
          (json \ "publishTime").extractOpt[String].map {
            // Thread-safe: each thread parses with its own SimpleDateFormat.
            dateStr => threadLocalDateFormat.get().parse(dateStr).getTime
          }.getOrElse(System.currentTimeMillis())
        }

        val author = (json \ "author").extractOpt[String]
        val url = (json \ "url").extractOpt[String]

        // Optional free-form metadata map.
        val metadata = (json \ "metadata").extractOpt[Map[String, String]].getOrElse(Map.empty)

        CleanedData(
          id = id,
          content = content,
          source = source,
          publishTime = publishTime,
          author = author,
          url = url,
          cleanedTime = System.currentTimeMillis(),
          metadata = metadata
        )
      }.toEither.left.map(e => s"解析错误: ${e.getMessage}")
    }
  }

  /**
    * Emits exactly one element per (id, window): the first one observed.
    * The window only fires when it contains at least one element, so
    * `iterator().next()` cannot fail here.
    */
  class DeduplicateWindowFunction extends WindowFunction[CleanedData, CleanedData, String, TimeWindow] {
    override def apply(
      key: String,
      window: TimeWindow,
      input: java.lang.Iterable[CleanedData],
      out: Collector[CleanedData]): Unit = {
      // Keep only the first occurrence within the window (deduplication).
      out.collect(input.iterator().next())
    }
  }

  /**
    * Normalizes text fields: collapses whitespace/newlines in `content`,
    * lower-cases and trims `source`, and refreshes `cleanedTime`.
    */
  class DataStandardizerMapFunction extends MapFunction[CleanedData, CleanedData] {
    override def map(value: CleanedData): CleanedData = {
      // Normalize the text content.
      val standardizedContent = value.content
        .trim()
        .replaceAll("\\s+", " ")     // collapse runs of whitespace to one space
        .replaceAll("[\\r\\n]", " ") // replace newlines with spaces

      // Normalize the source name.
      val standardizedSource = value.source.toLowerCase.trim

      value.copy(
        content = standardizedContent,
        source = standardizedSource,
        cleanedTime = System.currentTimeMillis()
      )
    }
  }

  /**
    * Serializes a [[CleanedData]] back to a compact JSON string via json4s.
    */
  class JsonSerializerMapFunction extends MapFunction[CleanedData, String] {
    override def map(value: CleanedData): String = {
      compact(render(
        Extraction.decompose(value)
      ))
    }
  }
}