package com.caul.demo.hadoop.hdfs.client;

import com.caul.demo.hadoop.hdfs.Cmd;
import com.caul.demo.hadoop.hdfs.model.MetaData;
import com.caul.demo.hadoop.hdfs.model.NameNodeReq;

import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

/**
 * Upload client for the demo mini-HDFS.
 *
 * <p>Flow: ask the NameNode where each block of the local file should be stored
 * (block ids + target DataNodes), then stream each block to its DataNode.
 *
 * <p>Created by BlueDream on 2018-03-27.
 */
public class UploadClient {

  /** Buffer size used when streaming block data to a DataNode. */
  private static final int BUFFER_SIZE = 4096;

  public static void main(String[] args) throws IOException {
    System.out.println("启动上传客户端:-------");

    String dir = Cmd.DATA_PATH;
    String fileName = "demo.log";
    File file = new File(dir + fileName);
    if (!file.exists()) {
      throw new RuntimeException("文件不存在" + file.getPath());
    }

    // Ask the NameNode for upload metadata: block ids and the DataNode
    // assigned to each block.
    MetaData nameNodeResp = getUploadInfo(fileName, file.length());

    // Upload each block to its DataNode. blockStart tracks the file offset
    // where the current block begins; blocks are laid out back-to-back.
    long blockStart = 0;
    for (MetaData.BlockData blockData : nameNodeResp.getBlocks()) {
      blockData.setBlockStart(blockStart);
      uploadData(file, blockData);
      blockStart += blockData.getBlockLen();
    }
  }

  /**
   * Contacts the NameNode, requests permission to upload {@code fileName}
   * of {@code fileLen} bytes, and parses the response into {@link MetaData}
   * (block ids plus the DataNode that should store each block).
   *
   * @param fileName name of the file to upload
   * @param fileLen  length of the file in bytes
   * @return parsed NameNode response with block placement information
   * @throws IOException if the connection fails, or the NameNode closes the
   *                     stream without sending a response
   */
  private static MetaData getUploadInfo(String fileName, long fileLen) throws IOException {
    // try-with-resources closes socket and streams even if the request or
    // the response parsing throws (the original opened them before the try,
    // leaking them if a later open failed).
    try (Socket nameNode = new Socket(Cmd.NAMENODE_IP, Cmd.NAMENODE_PORT);
        OutputStream nnOut = nameNode.getOutputStream();
        InputStream nnIn = nameNode.getInputStream()) {

      // Encode explicitly as UTF-8 instead of relying on the platform charset.
      nnOut.write(new NameNodeReq(fileName, fileLen).value().getBytes(StandardCharsets.UTF_8));
      nnOut.flush();

      byte[] bts = new byte[1024];
      int read = nnIn.read(bts);
      if (read < 0) {
        // Without this check, new String(bts, 0, -1) would throw a confusing
        // StringIndexOutOfBoundsException.
        throw new EOFException("NameNode closed connection without a response");
      }
      return MetaData.of(new String(bts, 0, read, StandardCharsets.UTF_8));
    }
  }

  /**
   * Streams one block of {@code file} to the DataNode recorded in
   * {@code blockData}: sends the fixed-length upload command header, then
   * copies exactly {@code blockData.getBlockLen()} bytes starting at file
   * offset {@code blockData.getBlockStart()}.
   *
   * @param file      local file being uploaded
   * @param blockData block id, target DataNode, offset and length of the block
   * @throws IOException if the connection fails, the start offset cannot be
   *                     reached, or the file ends before the full block is read
   */
  private static void uploadData(File file, MetaData.BlockData blockData) throws IOException {
    try (Socket dataNode = new Socket(blockData.getDataNode(), Cmd.DATANODE_PORT);
        OutputStream dnOut = dataNode.getOutputStream();
        // The response stream is opened (as in the original) but the server
        // acknowledgement is currently not consumed.
        InputStream dnIn = dataNode.getInputStream();
        FileInputStream fis = new FileInputStream(file)) {

      // Tell the DataNode that block data follows; the command is zero-padded
      // to the fixed length the server expects.
      dnOut.write(Arrays.copyOf(
          String.format(Cmd.DN_UPLOAD_REQ, Cmd.UPLOAD, blockData.getBlockId())
              .getBytes(StandardCharsets.UTF_8),
          Cmd.DATANODE_CMD_LEN));
      dnOut.flush();

      // Seek to the block's starting offset. InputStream.skip may skip fewer
      // bytes than requested, so loop until the full offset is consumed
      // (the original ignored skip()'s return value).
      long toSkip = blockData.getBlockStart();
      while (toSkip > 0) {
        long skipped = fis.skip(toSkip);
        if (skipped <= 0) {
          throw new EOFException(
              "Cannot seek to block start offset " + blockData.getBlockStart());
        }
        toSkip -= skipped;
      }

      // Copy exactly blockLen bytes. The original ignored fis.read()'s return
      // value; a short read would have resent stale buffer contents and
      // silently corrupted the uploaded block.
      byte[] bts = new byte[BUFFER_SIZE];
      long leftLen = blockData.getBlockLen();
      while (leftLen > 0) {
        int want = (int) Math.min(leftLen, bts.length);
        int read = fis.read(bts, 0, want);
        if (read < 0) {
          throw new EOFException("File ended with " + leftLen + " bytes of block unread");
        }
        dnOut.write(bts, 0, read);
        leftLen -= read;
      }
      dnOut.flush();
    }
  }
}
