package student.convert

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import util.Eastmoney.StockData

import java.util.Properties

object KafkaSource {

  // Jackson mapper held as an object-level field so the Flink `map` lambda does not
  // capture a non-serializable local ObjectMapper in its closure (ObjectMapper is not
  // Serializable; capturing it in `main` breaks closure serialization when the job
  // graph is shipped to TaskManagers). Members of a Scala `object` are re-initialized
  // once per worker JVM, which is exactly what we want for a thread-safe mapper.
  private val objectMapper = new ObjectMapper()

  /**
   * Entry point: reads stock-quote JSON records from the Kafka topic "bigdata",
   * parses each into a [[StockData]], and prints the first occurrence of every
   * distinct stock name.
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Kafka consumer configuration; group.id identifies this job's consumer group.
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "node1:9092")
    properties.setProperty("group.id", "flink-group")

    env.addSource(new FlinkKafkaConsumer[String]("bigdata", new SimpleStringSchema(), properties))
      .map(jsonString => parseJsonToStockData(jsonString))
      .map(data => (data.name, 1))
      .keyBy(_._1)
      .sum(1)
      // The running count per name is always >= 1, so this keeps only the very
      // first record seen for each stock name and drops all later updates.
      .filter(_._2 <= 1)
      .print()

    env.execute()
  }

  /**
   * Parses a JSON string from Kafka into a [[StockData]] case class.
   *
   * Uses `path(...)` rather than `get(...)` so that a missing field yields
   * Jackson's `MissingNode` type defaults (0 / 0.0 / 0L / "") instead of a
   * NullPointerException — one malformed record would otherwise crash the job
   * and put it into a fail/restart loop.
   *
   * @param jsonString raw JSON payload of a single Kafka record
   * @return the populated StockData; absent fields fall back to type defaults
   */
  private def parseJsonToStockData(jsonString: String): StockData = {
    val jsonNode = objectMapper.readTree(jsonString)

    StockData(
      jsonNode.path("index").asInt(),
      jsonNode.path("code").asText(),
      jsonNode.path("name").asText(),
      jsonNode.path("related_links").asText(),
      jsonNode.path("latest_price").asDouble(),
      jsonNode.path("change_percentage").asText(),
      jsonNode.path("change_amount").asDouble(),
      jsonNode.path("volume_hands").asLong(),
      jsonNode.path("turnover_yuan").asText(),
      jsonNode.path("amplitude").asText(),
      jsonNode.path("high").asDouble(),
      jsonNode.path("low").asDouble(),
      jsonNode.path("open_price").asDouble(),
      jsonNode.path("previous_close").asDouble(),
      jsonNode.path("turnover_rate").asText(),
      jsonNode.path("pe_ratio_dynamic").asDouble(),
      jsonNode.path("pb_ratio").asDouble(),
      jsonNode.path("writeTime").asLong(),
      jsonNode.path("updateTime").asLong()
    )
  }
}
