/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.datacollector.bundles;

import com.amazonaws.util.Base64;
import org.apache.commons.lang3.StringUtils;

/**
 * Constant keys, default values, and small naming helpers for the support-bundle /
 * data-collection pipeline configuration.
 *
 * <p>Thread-safe: all state is {@code static final} and immutable.
 */
public final class Constants {

  /** Utility class — not instantiable. */
  private Constants() {
  }

  // General

  // Operation types

  /** Operation type: import (collection into the warehouse). */
  public static final String OP_TYPE_IMPORT = "import";

  /** Operation type: export (out of the warehouse). */
  public static final String OP_TYPE_EXPORT = "export";

  /**
   * Returns the temporary-table prefix to use for the given operation type.
   *
   * <p>Note: "Preffix" is a historical typo in the method name, kept for caller compatibility.
   *
   * @param opType operation type, compared case-insensitively against {@link #OP_TYPE_IMPORT};
   *     any other value (including {@code null}) selects the export prefix
   * @return {@link #PROPERTIES_OLLECT_TEMP_TABLE_PREFIX} for imports,
   *     {@link #PROPERTIES_EXPORT_TEMP_TABLE_PREFIX} otherwise
   */
  public static String getTablePreffixByOpType(String opType) {
    if (OP_TYPE_IMPORT.equalsIgnoreCase(opType)) {
      return PROPERTIES_OLLECT_TEMP_TABLE_PREFIX;
    }
    return PROPERTIES_EXPORT_TEMP_TABLE_PREFIX;
  }

  /** Properties file listing the stages that should be ignored. */
  public static final String IGNORE_STAGES = "ignore_stages.properties";

  /**
   * Returns the ETL pipeline template file name for the given warehouse type.
   *
   * @param warehouseType warehouse type (e.g. {@code "hive"}, {@code "impala"});
   *     {@code null} or empty defaults to {@code "hive"}
   * @return template file name of the form {@code template_<type>_etl.json}
   */
  public static String getEtlTemplateNameByWarehouseType(String warehouseType) {
    // Fall back to hive when no warehouse type was configured.
    String type = (warehouseType == null || warehouseType.isEmpty()) ? "hive" : warehouseType;
    return "template_" + type + "_etl.json";
  }

  /**
   * Returns the export pipeline template file name for the given warehouse type.
   *
   * @param warehouseType warehouse type (e.g. {@code "hive"}, {@code "impala"});
   *     {@code null} or empty defaults to {@code "hive"}
   * @return template file name of the form {@code template_<type>_export.json}
   */
  public static String getExportTemplateNameByWarehouseType(String warehouseType) {
    // Fall back to hive when no warehouse type was configured.
    String type = (warehouseType == null || warehouseType.isEmpty()) ? "hive" : warehouseType;
    return "template_" + type + "_export.json";
  }

  /** Template for incremental import into hive. */
  public static final String PIPELINE_TEMPLATE_NAME_OF_HIVE_INC = "template_hive_inc.json";

  /** Template for full import into hive. */
  public static final String PIPELINE_TEMPLATE_NAME_OF_HIVE_FULL = "template_hive_full.json";

  /** Default pipeline configuration properties file. */
  public static final String DEFAULT_PIPELINE_CONFIG_NAME = "default_pipeline_config.properties";

  /** Property key: warehouse type. */
  public static final String PROPERTIES_WAREHOUSE_WAREHOUSE_TYPE = "warehouse.type";

  /** Property key: warehouse JDBC connection URL. */
  public static final String PROPERTIES_WAREHOUSE_JDBC_URL_NAME = "warehouse.jdbc.url";

  /** Property key: warehouse JDBC username. */
  public static final String PROPERTIES_WAREHOUSE_JDBC_USERNAME = "warehouse.jdbc.username";

  /** Property key: warehouse JDBC password. */
  public static final String PROPERTIES_WAREHOUSE_JDBC_PASSWORD = "warehouse.jdbc.password";

  /** Property key: warehouse JDBC driver class. */
  public static final String PROPERTIES_WAREHOUSE_JDBC_DRIVER = "warehouse.jdbc.driver";

  /** Property key: warehouse JDBC schema (database). */
  public static final String PROPERTIES_WAREHOUSE_JDBC_SCHEMA = "warehouse.jdbc.schema";

  /** Property key: impala JDBC connection URL. */
  public static final String PROPERTIES_IMPALA_JDBC_URL_NAME = "impala.jdbc.url";

  /** Property key: impala JDBC driver class. */
  public static final String PROPERTIES_IMPALA_JDBC_DRIVER = "impala.jdbc.driver";

  /** Property key: impala JDBC schema (database). */
  public static final String PROPERTIES_IMPALA_JDBC_SCHEMA = "impala.jdbc.schema";

  /** Property key: impala JDBC username. */
  public static final String PROPERTIES_IMPALA_JDBC_USERNAME = "impala.jdbc.username";

  /** Property key: impala JDBC password. */
  public static final String PROPERTIES_IMPALA_JDBC_PASSWORD = "impala.jdbc.password";

  /**
   * Prefix for collection (import) temporary tables.
   * "OLLECT" is a historical typo in the constant name, kept for caller compatibility.
   */
  public static final String PROPERTIES_OLLECT_TEMP_TABLE_PREFIX = "collect_tmp_";

  /** Prefix for export temporary tables. */
  public static final String PROPERTIES_EXPORT_TEMP_TABLE_PREFIX = "export_tmp_";


  // General

  /**
   * Configuration file that will be search in SDC_CONF for redactor configuration.
   */
  public static final String REDACTOR_CONFIG = "support-bundle-redactor.json";

  public static final String CUSTOMER_ID_FILE = "customer.id";
  public static final String DEFAULT_CUSTOMER_ID = "";

  /**
   * Uploading support bundles directly
   */
  public static final String UPLOAD_ENABLED = "bundle.upload.enabled";
  public static final boolean DEFAULT_UPLOAD_ENABLED = true;

  public static final String UPLOAD_BUCKET = "bundle.upload.bucket";
  public static final String DEFAULT_UPLOAD_BUCKET = "customer-support-bundles";

  // NOTE(security): the two defaults below are hard-coded AWS credentials, merely
  // obfuscated with base64. Anyone with this source can decode them. They should be
  // rotated and supplied via external configuration rather than embedded here.
  // The charset is pinned to UTF-8 so decoding does not depend on the platform default.
  public static final String UPLOAD_ACCESS = "bundle.upload.access";
  public static final String DEFAULT_UPLOAD_ACCESS = new String(
      java.util.Base64.getDecoder().decode("QUtJQUpaRzQzUExFSFNMNktMU0E="),
      java.nio.charset.StandardCharsets.UTF_8);

  public static final String UPLOAD_SECRET = "bundle.upload.secret";
  public static final String DEFAULT_UPLOAD_SECRET = new String(
      java.util.Base64.getDecoder().decode("MUFaNW1VSHNlbXJmYTd3TGNzWkRUdGJieitQNlI0bVVDSGVUMHVYbg=="),
      java.nio.charset.StandardCharsets.UTF_8);

  public static final String UPLOAD_BUFFER_SIZE = "bundle.upload.buffer_size";
  public static final int DEFAULT_UPLOAD_BUFFER_SIZE = 5 * 1024 * 1024; // 5MB, amazon lower limit

  public static final String UPLOAD_ON_ERROR = "bundle.upload.on_error";
  public static final boolean DEFAULT_UPLOAD_ON_ERROR = false;

  // Log Generator

  /**
   * 2GB of raw logs is equal to roughly ~70MB after zip compression (on real life logs)
   */
  public static final String LOG_MAX_SIZE = "bundle.log.max_size";
  public static final long DEFAULT_LOG_MAX_SIZE = 2L * (1024 * 1024 * 1024);

  /**
   * For GC, we want last ~50 MBs (random constant at this point).
   */
  public static final String LOG_GC_MAX_SIZE = "bundle.log.gc_max_size";
  public static final long DEFAULT_LOG_GC_MAX_SIZE = (50 * 1024 * 1024);

  /**
   * For JVM FATAL error file, we want last ~10 MBs (random constant at this point).
   */
  public static final String LOG_HS_MAX_SIZE = "bundle.log.hs_max_size";
  public static final long DEFAULT_LOG_HS_MAX_SIZE = (10 * 1024 * 1024);

  // Pipeline Generator

  /**
   * Redaction regular expression for pipeline configuration keys
   */
  public static final String PIPELINE_REDACT_REGEXP = "bundle.pipeline.redact_regexp";
  public static final String DEFAULT_PIPELINE_REDACT_REGEXP = "(.*[Pp]assword.*|.*AccessKey.*)";
}
