package com.tcs.start.job.impl;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import com.tcs.start.job.HdfsJob;

public class HdfsJobImpl implements HdfsJob {

	private static final Logger LOG = Logger.getLogger(HdfsJobImpl.class.getName());

	// Buffer size (bytes) passed to IOUtils.copyBytes for the local->HDFS copy.
	private int buffSize = 4096;

	/**
	 * Copies a local file into HDFS.
	 *
	 * <p>Reads {@code /usr/local/filecontent/demo} from the local (Linux)
	 * filesystem and writes it to {@code hdfs://neusoft-master:9000/data/test}.
	 * Any failure is logged and swallowed; the job does not rethrow.
	 */
	@Override
	public void run() {
		// Local source path; the file is expected to exist and contain data.
		String source = "/usr/local/filecontent/demo";
		// Destination HDFS URI/path.
		String destination = "hdfs://neusoft-master:9000/data/test";
		// HDFS read/write configuration.
		Configuration conf = new Configuration();
		// try-with-resources guarantees the local stream is closed even when
		// FileSystem.get(...) or fs.create(...) throws before the copy starts
		// (the original code leaked the stream on that path).
		try (InputStream in = new BufferedInputStream(new FileInputStream(source))) {
			// FileSystem.create returns an FSDataOutputStream. It does not
			// support seeking: HDFS only allows sequential write or append
			// to an open file.
			FileSystem fs = FileSystem.get(URI.create(destination), conf);
			OutputStream out = fs.create(new Path(destination));
			// closeStreams=true: copyBytes closes both streams when the copy
			// finishes (or fails), so no explicit close of `out` is needed.
			IOUtils.copyBytes(in, out, buffSize, true);
		} catch (IllegalArgumentException | IOException e) {
			// FileNotFoundException is an IOException, so one catch clause
			// covers the missing-source case too. Log instead of dumping a
			// raw stack trace to stderr; preserve the original behavior of
			// not propagating the failure.
			LOG.log(Level.SEVERE, "Failed to copy " + source + " to " + destination, e);
		}
	}
}