package com.ydl.learning.flink.demo

import java.util.Properties
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala.{DataSet, ExecutionEnvironment}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

/**
 * Shared Flink helpers for the demos: pre-built batch and streaming
 * execution environments, a socket text-stream source, a text-file
 * reader, and a pre-configured Kafka string consumer.
 *
 * @author ydl
 * @since 2020/10/10
 */
trait Utils {
  /** Sample (id, label, value) tuples for quick experiments. */
  val dataDemo: List[(Int, String, Double)] = List((1, "a", 1.2), (2, "b", 2.1), (1, "a", 1.1))

  /** Batch execution environment (DataSet API). */
  val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment

  /** Streaming execution environment (DataStream API). */
  val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

  /**
   * Opens a newline-delimited text stream from a socket on localhost.
   * Start a test server first with: nc -lk 9999
   *
   * @param prot port to connect to (NOTE: name is a typo for "port", kept
   *             unchanged for source compatibility with named-argument callers)
   * @return the streaming environment paired with the socket text stream
   */
  def getSocketTextStream(prot: Int): (StreamExecutionEnvironment, DataStream[String]) = {
    (streamEnv, streamEnv.socketTextStream("localhost", prot, '\n'))
  }

  /**
   * Reads a text file as a batch DataSet, one element per line.
   *
   * @param path file path or any Flink-supported filesystem URI
   */
  def readFile(path: String): DataSet[String] = {
    env.readTextFile(path)
  }

  /** Kafka consumer configuration; setProperty is the string-typed Properties API. */
  val kafkaProperties: Properties = new Properties()
  kafkaProperties.setProperty("bootstrap.servers", "10.50.162.207:9092")
  kafkaProperties.setProperty("group.id", "flink_test")
  kafkaProperties.setProperty("auto.offset.reset", "latest")
  kafkaProperties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
  kafkaProperties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")

  /**
   * Builds a string-deserializing Kafka consumer for the given topic using
   * the shared [[kafkaProperties]]. Generalizes the previously hard-coded topic.
   *
   * @param topic Kafka topic to subscribe to
   */
  def createKafkaConsumer(topic: String): FlinkKafkaConsumer[String] =
    new FlinkKafkaConsumer[String](topic, new SimpleStringSchema(), kafkaProperties)

  /** Consumer for the default "test" topic (kept for backward compatibility). */
  val consumer: FlinkKafkaConsumer[String] = createKafkaConsumer("test")
}
