package com.caul.demo.hadoop.hdfs;

/**
 * Created by BlueDream on 2018-03-27.
 */
/**
 * Protocol constants shared by the demo NameNode/DataNode client and servers:
 * network endpoints, wire-format separators, command names, and
 * {@link String#format(String, Object...)} templates for request/response lines.
 *
 * <p>Constants-only holder: not instantiable, not extendable.
 */
public final class Cmd {

  /** NameNode (master) host address. */
  public static final String NAMENODE_IP = "localhost";
  /** NameNode (master) listening port. */
  public static final int NAMENODE_PORT = 9000;
  /** DataNode listening port. */
  public static final int DATANODE_PORT = 9900;
  /** Local directory where block data files are stored. */
  public static final String DATA_PATH = "E:/ws-idea/bigdata/data/";

  /** Separator between top-level fields of a command line (e.g. cmd/fileName/len). */
  public static final String SPLIT_CMD = ":";

  /** Separator between block-metadata entries in a response. */
  public static final String SPLIT_META = ";";

  /** Separator between the fields inside one block-metadata entry. */
  public static final String SPLIT_BLOCK = ",";

  /** Command name: upload a file. */
  public static final String UPLOAD = "upload";

  /** Command name: download a file. */
  public static final String DOWNLOAD = "download";

  /** Fixed byte length of the command header sent to a DataNode. */
  public static final int DATANODE_CMD_LEN = 128;

  /** Maximum size of one block in bytes (100 KiB). */
  public static final int BLOCK_SIZE = 100 * 1024;

  /**
   * Client-to-NameNode upload request: {@code cmd:fileName:fileLen}.
   */
  public static final String NN_UPLOAD_REQ = "%s" + Cmd.SPLIT_CMD + "%s" + Cmd.SPLIT_CMD + "%d";
  /**
   * NameNode upload response: {@code fileName:<blockInfo>}, where the second
   * field is one or more {@link #INFO_BLOCK} entries joined by {@link #SPLIT_META}.
   */
  public static final String NN_UPLOAD_RESP = "%s" + Cmd.SPLIT_CMD + "%s";
  /**
   * One block-metadata entry: {@code blockId,datanode,blockLen}
   * (fields joined by {@link #SPLIT_BLOCK}).
   */
  public static final String INFO_BLOCK = "%s" + Cmd.SPLIT_BLOCK + "%s" + Cmd.SPLIT_BLOCK + "%d";
  /**
   * Client-to-DataNode upload request: {@code cmd:blockId}.
   */
  public static final String DN_UPLOAD_REQ = "%s" + Cmd.SPLIT_CMD + "%s";
  /**
   * Client-to-DataNode download request: two fields joined by {@link #SPLIT_CMD}
   * (original note read "blockId:success/false" — presumably {@code cmd:blockId};
   * verify against the caller).
   */
  public static final String DN_DOWNLOAD_REQ = "%s" + Cmd.SPLIT_CMD + "%s";

  /** Non-instantiable constants holder. */
  private Cmd() {
    throw new AssertionError("No Cmd instances");
  }
}
