package com.codejiwei.flink.table.redis.option

import org.apache.flink.configuration.{ConfigOption, ConfigOptions}

import java.lang

/**
 * Author: codejiwei
 * Date: 2022/7/7
 * Desc: ConfigOption definitions for the rtsql Redis table connector.
 * */
object RtsqlRedisOptions {

  /** Write mode of the connector; fixed to "append" (no retract/upsert support). */
  val UPDATE_MODE: ConfigOption[String] =
    ConfigOptions
      .key("update-mode")
      .stringType()
      .defaultValue("append")
      .withDescription("Options update mode fixed to append")

  /** BNS address used to resolve the Redis service endpoints. Required (no default). */
  val BNS: ConfigOption[String] =
    ConfigOptions
      .key("connector.bns")
      .stringType()
      .noDefaultValue()
      .withDescription("Options for connect redis with bns way")

  /** Password for authenticating against Redis. Required (no default). */
  val PASSWORD: ConfigOption[String] =
    ConfigOptions
      .key("connector.password")
      .stringType()
      .noDefaultValue()
      .withDescription("Option of password for connect redis")

  /**
   * Whether Codis nodes should be filtered out of the resolved node list.
   * Boxed java.lang.Boolean because Flink's booleanType() options are Java-typed.
   */
  val FILTER: ConfigOption[lang.Boolean] =
    ConfigOptions
      .key("connector.filter")
      .booleanType()
      .defaultValue(true)
      .withDescription("Whether to filter Codis nodes")

  // NOTE(review): legacy connector keys kept for reference; not yet ported to
  // typed ConfigOptions. Uncomment and convert individually as they are needed.
  //
  //   connector.metadata.url                  (METADATA_URL)
  //   connector.metadata.retries              (METADATA_RETRIES)
  //   connector.metadata.cache                (METADATA_CACHE_PATH)
  //   connector.env                           (ENV_NAME)
  //   connector.database                      (DATABASE_NAME)
  //   connector.table                         (TABLE_NAME)
  //   connector.table.version                 (TABLE_VERSION)
  //   connector.format                        (TABLE_FORMAT)
  //   connector.number                        (TABLE_MAX_NUMBER)
  //   connector.index.fields                  (INDEX_FIELDS)
  //   connector.primary                       (PRIMARY_KEY)
  //   connector.buffer.size                   (BUFFER_SIZE)
  //   connector.source.join                   (IS_JOIN)
  //   connector.source.max_rows_each_req      (MAX_ROWS_EACH_REQ)
  //   connector.source.max_rows_each_index    (MAX_ROWS_EACH_INDEX)
  //   connector.buffer.flush.interval         (FLUSH_INTERVAL)
  //   connector.kafka.bootstraps              (KAFKA_BOOTSTRAP)
  //   connector.kafka.topic                   (KAFKA_TOPIC)
  //   connector.kafka.key                     (KAFKA_KEY)
  //   connector.redis.ttl                     (TTL)
  //   connector.file.mode                     (FILE_MODE)
}
