"""Flink实时特征提取（实体密度、时间特征）

负责从清洗后的数据中提取特征，包括：
- 实体密度特征（各种实体类型在文本中的出现频率）
- 时间特征（发布时间的各种衍生特征）
- 文本统计特征（长度、词频等）
- 来源特征（数据源的可信度、权威性等）
"""
import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer
import org.apache.flink.api.java.utils.ParameterTool

import org.json4s._
import org.json4s.jackson.JsonMethods._
import java.util.{Properties, Calendar}
import scala.util.Try
import scala.math.log1p

/**
  * Entity-density features: per-type entity frequency relative to text length.
  * All densities are log-smoothed ratios in [0, 1] (see the extractor below).
  */
case class EntityDensityFeatures(
  companyDensity: Double,  // density of company entities
  personDensity: Double,   // density of person entities
  productDensity: Double,  // density of product entities
  eventDensity: Double,    // density of event entities
  locationDensity: Double, // density of location entities
  timeDensity: Double      // density of time entities
)

/**
  * Time features derived from the record's publication timestamp.
  */
case class TimeFeatures(
  hourOfDay: Int,          // hour of day, 0-23 (java.util.Calendar.HOUR_OF_DAY)
  dayOfWeek: Int,          // day of week (java.util.Calendar.DAY_OF_WEEK: SUNDAY=1..SATURDAY=7)
  isWeekend: Boolean,      // whether the day is Saturday or Sunday
  isMarketOpen: Boolean,   // whether publication fell inside trading hours
  timeSincePublication: Long // seconds elapsed since publication (at extraction time)
)

/**
  * Fully-featured record emitted by this job: the original cleaned record's
  * identity fields plus all extracted feature groups.
  */
case class FeatureData(
  id: String,              // unique record ID
  content: String,         // original text content
  source: String,          // data source name
  publishTime: Long,       // publication timestamp (epoch millis)
  entityDensity: EntityDensityFeatures, // entity-density feature group
  timeFeatures: TimeFeatures,           // time feature group
  textLength: Int,         // text length in characters
  wordCount: Int,          // number of whitespace-separated words
  avgWordLength: Double,   // average word length (0.0 when there are no words)
  sourceReliability: Double, // source reliability score in (0, 1]
  featureExtractionTime: Long // timestamp when features were extracted (epoch millis)
)

/**
  * Flink特征引擎
  */
object FlinkFeatureEngine {
  // JSON解析器隐式值
  implicit val formats: DefaultFormats.type = DefaultFormats

  /**
    * Job entry point: wires the Kafka source, the feature-extraction
    * pipeline, and the Kafka sink, then submits the streaming job.
    *
    * Recognized arguments (all optional, with defaults):
    * `--parallelism`, `--kafka.bootstrap.servers`, `--kafka.group.id`,
    * `--kafka.auto.offset.reset`, `--kafka.input.topic`, `--kafka.output.topic`.
    */
  def main(args: Array[String]): Unit = {
    // Load job configuration from command-line arguments.
    val params = ParameterTool.fromArgs(args)
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Operator parallelism (default: 4).
    env.setParallelism(params.getInt("parallelism", 4))

    // Kafka consumer configuration.
    val consumerProps = new Properties()
    consumerProps.setProperty("bootstrap.servers", params.get("kafka.bootstrap.servers", "localhost:9092"))
    consumerProps.setProperty("group.id", params.get("kafka.group.id", "flink-feature-engine-group"))
    consumerProps.setProperty("auto.offset.reset", params.get("kafka.auto.offset.reset", "earliest"))

    // Read cleaned records from Kafka.
    val cleanedStream: DataStream[String] = env.addSource(
      new FlinkKafkaConsumer[String](
        params.get("kafka.input.topic", "cleaned_finance_data"),
        new SimpleStringSchema(),
        consumerProps
      )
    )

    // Parse JSON: Left carries an error message, Right the parsed record.
    val parsedStream: DataStream[Either[String, CleanedData]] = cleanedStream.map(new JsonParserMapFunction())

    // Keep only successfully parsed records.
    // FIX: replaces `.filter(_.isRight).map(_.right.get)` — `Either.right.get`
    // is deprecated (Scala 2.13) and throws if the filter is ever reordered
    // away; flatMap over `toOption` is total and equivalent.
    // NOTE(review): parse failures are silently dropped here — consider a
    // side output / dead-letter topic for them.
    val validStream: DataStream[CleanedData] = parsedStream.flatMap(_.toOption)

    // Stage 1: entity-density features.
    val entityDensityStream: DataStream[(CleanedData, EntityDensityFeatures)] =
      validStream.map(new EntityDensityExtractorMapFunction())

    // Stage 2: time features.
    val timeFeatureStream: DataStream[(CleanedData, EntityDensityFeatures, TimeFeatures)] =
      entityDensityStream.map(new TimeFeatureExtractorMapFunction())

    // Stage 3: text statistics + source reliability, assembling FeatureData.
    val featureStream: DataStream[FeatureData] =
      timeFeatureStream.map(new TextFeatureExtractorMapFunction())

    // Serialize the final records to JSON.
    val resultStream: DataStream[String] = featureStream.map(new JsonSerializerMapFunction())

    // Kafka producer configuration.
    val producerProps = new Properties()
    producerProps.setProperty("bootstrap.servers", params.get("kafka.bootstrap.servers", "localhost:9092"))

    // Write feature records to the output topic.
    resultStream.addSink(
      new FlinkKafkaProducer[String](
        params.get("kafka.output.topic", "featured_finance_data"),
        new SimpleStringSchema(),
        producerProps
      )
    )

    // Submit the streaming job.
    env.execute("Finance Data Feature Engineering Job")
  }

  /**
    * Cleaned input record (schema kept in sync with FlinkDataCleaner).
    */
  case class CleanedData(
    id: String,
    content: String,
    source: String,
    publishTime: Long,
    author: Option[String],
    url: Option[String],
    cleanedTime: Long,
    metadata: Map[String, String]
  )

  /**
    * JSON解析MapFunction
    */
  /**
    * Parses one JSON line into a [[CleanedData]] record.
    * Returns Right(record) on success, or Left(message) when any required
    * field is missing or malformed; no exception escapes this function.
    */
  class JsonParserMapFunction extends MapFunction[String, Either[String, CleanedData]] {
    override def map(value: String): Either[String, CleanedData] =
      Try {
        val root = parse(value)
        // Build the record field by field so a missing/mis-typed field
        // surfaces as a single parse failure for the whole line.
        CleanedData(
          id = (root \ "id").extract[String],
          content = (root \ "content").extract[String],
          source = (root \ "source").extract[String],
          publishTime = (root \ "publishTime").extract[Long],
          author = (root \ "author").extractOpt[String],
          url = (root \ "url").extractOpt[String],
          cleanedTime = (root \ "cleanedTime").extract[Long],
          metadata = (root \ "metadata").extract[Map[String, String]]
        )
      }.toEither.left.map(err => s"解析错误: ${err.getMessage}")
  }

  /**
    * 实体密度提取MapFunction
    */
  /**
    * Computes entity-density features from the entity counts carried in the
    * record's metadata (in a real system these counts would come from an NER
    * model). Each density is log-smoothed: log1p(count) / log1p(textLength).
    */
  class EntityDensityExtractorMapFunction extends MapFunction[CleanedData, (CleanedData, EntityDensityFeatures)] {
    override def map(value: CleanedData): (CleanedData, EntityDensityFeatures) = {
      val totalLength = value.content.length.toDouble
      val entities = extractEntitiesFromMetadata(value.metadata)

      // log1p smoothing dampens extreme counts; guard against empty content
      // to avoid division by zero.
      def density(entityType: String): Double =
        if (totalLength > 0) log1p(entities.getOrElse(entityType, 0)) / log1p(totalLength) else 0.0

      val densityFeatures = EntityDensityFeatures(
        companyDensity = density("company"),
        personDensity = density("person"),
        productDensity = density("product"),
        eventDensity = density("event"),
        locationDensity = density("location"),
        timeDensity = density("time")
      )

      (value, densityFeatures)
    }

    /**
      * Reads per-type entity counts out of the metadata map.
      * FIX: the original applied `_.toInt` directly, so a single malformed
      * count value (e.g. "N/A") threw NumberFormatException and failed the
      * whole job; unparseable values now fall back to 0.
      */
    private def extractEntitiesFromMetadata(metadata: Map[String, String]): Map[String, Int] = {
      def count(key: String): Int =
        metadata.get(key).flatMap(v => Try(v.toInt).toOption).getOrElse(0)
      Map(
        "company" -> count("company_count"),
        "person" -> count("person_count"),
        "product" -> count("product_count"),
        "event" -> count("event_count"),
        "location" -> count("location_count"),
        "time" -> count("time_count")
      )
    }
  }

  /**
    * 时间特征提取MapFunction
    */
  /**
    * Derives time features from the record's publication timestamp.
    *
    * NOTE(review): Calendar.getInstance() uses the JVM default time zone, so
    * hour/weekday features depend on where the job runs — pin an explicit
    * zone (e.g. Asia/Shanghai for CN markets) if cross-环境 consistency matters.
    */
  class TimeFeatureExtractorMapFunction extends 
      MapFunction[(CleanedData, EntityDensityFeatures), (CleanedData, EntityDensityFeatures, TimeFeatures)] {
    override def map(value: (CleanedData, EntityDensityFeatures)): 
        (CleanedData, EntityDensityFeatures, TimeFeatures) = {
      val (cleanedData, densityFeatures) = value
      val publishTime = cleanedData.publishTime

      val calendar = Calendar.getInstance()
      calendar.setTimeInMillis(publishTime)

      // Basic calendar features.
      val hourOfDay = calendar.get(Calendar.HOUR_OF_DAY)
      val dayOfWeek = calendar.get(Calendar.DAY_OF_WEEK)
      val minuteOfHour = calendar.get(Calendar.MINUTE)
      val isWeekend = dayOfWeek == Calendar.SATURDAY || dayOfWeek == Calendar.SUNDAY

      // Trading window is 9:30-15:00 (inclusive of the 15:00 close tick).
      // FIX: the original tested `hourOfDay >= 9`, incorrectly marking
      // 9:00-9:29 as market-open despite the stated 9:30 opening time.
      val isMarketOpen =
        (hourOfDay > 9 || (hourOfDay == 9 && minuteOfHour >= 30)) &&
        (hourOfDay < 15 || (hourOfDay == 15 && minuteOfHour == 0))

      // Seconds elapsed between publication and feature extraction.
      val timeSincePublication = (System.currentTimeMillis() - publishTime) / 1000

      val timeFeatures = TimeFeatures(
        hourOfDay = hourOfDay,
        dayOfWeek = dayOfWeek,
        isWeekend = isWeekend,
        isMarketOpen = isMarketOpen,
        timeSincePublication = timeSincePublication
      )

      (cleanedData, densityFeatures, timeFeatures)
    }
  }

  /**
    * 文本特征提取MapFunction
    */
  /**
    * Computes text statistics and the source-reliability score, and assembles
    * the final [[FeatureData]] record.
    */
  class TextFeatureExtractorMapFunction extends 
      MapFunction[(CleanedData, EntityDensityFeatures, TimeFeatures), FeatureData] {
    override def map(value: (CleanedData, EntityDensityFeatures, TimeFeatures)): FeatureData = {
      val (cleanedData, densityFeatures, timeFeatures) = value
      val content = cleanedData.content

      // Text statistics.
      // FIX: `"".split("\\s+")` yields Array("") — empty content previously
      // counted as one word; leading whitespace also produced a phantom empty
      // token. Filtering empty tokens makes wordCount 0 for empty text.
      val textLength = content.length
      val words = content.split("\\s+").filter(_.nonEmpty)
      val wordCount = words.length
      val avgWordLength = if (wordCount > 0) words.map(_.length).sum.toDouble / wordCount else 0.0

      // Source-reliability score (sample implementation).
      val sourceReliability = calculateSourceReliability(cleanedData.source)

      FeatureData(
        id = cleanedData.id,
        content = cleanedData.content,
        source = cleanedData.source,
        publishTime = cleanedData.publishTime,
        entityDensity = densityFeatures,
        timeFeatures = timeFeatures,
        textLength = textLength,
        wordCount = wordCount,
        avgWordLength = avgWordLength,
        sourceReliability = sourceReliability,
        featureExtractionTime = System.currentTimeMillis()
      )
    }

    /**
      * Maps a source name to a reliability score; unknown sources get the
      * default 0.7. Matching is case-insensitive on the full name.
      */
    private def calculateSourceReliability(source: String): Double = {
      source.toLowerCase match {
        case "xinhua" | "people" | "reuters" | "bloomberg" => 0.9
        case "cctv" | "cnn" | "bbc" => 0.85
        case "financialtimes" | "wsj" => 0.8
        case _ => 0.7 // default reliability
      }
    }
  }

  /**
    * JSON序列化MapFunction
    */
  /**
    * Serializes a [[FeatureData]] record to a compact JSON string for the
    * Kafka sink, via json4s reflection-based decomposition.
    */
  class JsonSerializerMapFunction extends MapFunction[FeatureData, String] {
    override def map(value: FeatureData): String = {
      val ast = Extraction.decompose(value)
      compact(render(ast))
    }
  }
}